diff --git a/daqviewer/daqviewer.py b/daqviewer/daqviewer.py
index 9ced573..69316e9 100644
--- a/daqviewer/daqviewer.py
+++ b/daqviewer/daqviewer.py
@@ -2,24 +2,28 @@ import io
 import os
 import base64
 import datetime
+import numpy as np
 import pandas as pd
 import webbrowser as wb
 
 import dash
-from dash.dependencies import Input, Output
 import dash_core_components as dcc
 import dash_html_components as html
+from dash.dependencies import Input, Output
 import plotly.plotly as py
 import plotly.graph_objs as go
 
+import wafo.objects as wo
+
 app = dash.Dash()
 app.title = 'daqviewer'
 app.scripts.config.serve_locally = True
 
 app.layout = html.Div([
     dcc.Upload(
         id='upload-data',
-        children=html.Div([html.A('Drag and drop csv files, or click to select.')]),
+        children=html.Div(
+            [html.A('Drag and drop csv files, or click to select.')]),
         style={
             'width': '99%',
             'height': '60px',
@@ -45,11 +49,10 @@ def parse_contents(contents, filename, date):
     inst_type = basename.split('_')[-1]
     try:
         if inst_type == 'WP':
-            df = pd.read_csv(
-                io.StringIO(decoded.decode('utf-8')),
-                index_col=0,
-                header=5,
-                skiprows=[6])
+            df = pd.read_csv(io.StringIO(decoded.decode('utf-8')),
+                             index_col=0,
+                             header=5,
+                             skiprows=[6])
 
             # Rename columns based on probe locations
             suffixes = ['P1', 'P2', 'P3', 'incident', 'reflected']
@@ -60,11 +63,10 @@ def parse_contents(contents, filename, date):
                     col_names[i + j] = '{}-{}'.format(col, suffix)
             df.columns = col_names
         else:
-            df = pd.read_csv(
-                io.StringIO(decoded.decode('utf-8')),
-                index_col=0,
-                header=3,
-                skiprows=[4])
+            df = pd.read_csv(io.StringIO(decoded.decode('utf-8')),
+                             index_col=0,
+                             header=3,
+                             skiprows=[4])
     except Exception as e:
         print(e)
 
@@ -73,23 +75,50 @@ def parse_contents(contents, filename, date):
     # Zero time series based on first 5s
     df -= df[:5].mean()
 
-    data = []
+    ts = []
     for col in df.columns:
         trace = go.Scatter(x=df.index, y=df[col], name=col, opacity=0.8)
-        data.append(trace)
+        ts.append(trace)
 
     layout = dict(title=basename, xaxis=dict(rangeslider=dict()))
-    fig = dict(data=data, layout=layout)
+    timeseries = dict(data=ts, layout=layout)
+
+    # Specify wave statistics
+    var = [
+        'Hm0', 'Tm01', 'Tm02', 'Tm24', 'Tp', 'Ss', 'Sp', 'Ka', 'Tp1', 'alpha',
+        'eps2', 'eps4'
+    ]
+
+    spec = []
+    for col in df.columns:
+        t = df.index.values[:, np.newaxis]
+        x = df[[col]].values
 
-    return html.Div([dcc.Graph(id='my-graph', figure=fig)])
+        # Get wave statistics
+        xx = wo.mat2timeseries(np.hstack([t, x]))
+        S = xx.tospecdata()
+        S.freqtype = 'f'
+        values, _, keys = S.characteristic(var)
 
+        # Plot energy spectrum
+        trace = go.Scatter(x=S.args, y=S.data, name=col, opacity=0.8)
+        spec.append(trace)
 
-@app.callback(
-    Output('output-data-upload', 'children'), [
-        Input('upload-data', 'contents'),
-        Input('upload-data', 'filename'),
-        Input('upload-data', 'last_modified')
+    energy = dict(data=spec)
+
+    elements = html.Div([
+        dcc.Graph(id='time-series', figure=timeseries),
+        dcc.Graph(id='energy-spectrum', figure=energy)
     ])
+
+    return elements
+
+
+@app.callback(Output('output-data-upload', 'children'), [
+    Input('upload-data', 'contents'),
+    Input('upload-data', 'filename'),
+    Input('upload-data', 'last_modified')
+])
 def update_output(list_of_contents, list_of_names, list_of_dates):
     if list_of_contents is not None:
         children = [
@@ -102,7 +131,7 @@ def update_output(list_of_contents, list_of_names, list_of_dates):
 def main():
     port = 8050
     wb.open('http://localhost:{}'.format(port))
-    app.run_server(port=port, debug=False)
+    app.run_server(port=port, debug=True)
 
 
 if __name__ == '__main__':