@@ -2,24 +2,28 @@ import io
 import os
 import base64
 import datetime
+import numpy as np
 import pandas as pd
 import webbrowser as wb

 import dash
 from dash.dependencies import Input, Output
 import dash_core_components as dcc
 import dash_html_components as html
+from dash.dependencies import Input, Output
 import plotly.plotly as py
 import plotly.graph_objs as go
+import wafo.objects as wo

 app = dash.Dash()
 app.title = 'daqviewer'
 app.scripts.config.serve_locally = True

 app.layout = html.Div([
     dcc.Upload(
         id='upload-data',
-        children=html.Div([html.A('Drag and drop csv files, or click to select.')]),
+        children=html.Div(
+            [html.A('Drag and drop csv files, or click to select.')]),
         style={
             'width': '99%',
             'height': '60px',
@@ -45,8 +49,7 @@ def parse_contents(contents, filename, date):
     inst_type = basename.split('_')[-1]
     try:
         if inst_type == 'WP':
-            df = pd.read_csv(
-                io.StringIO(decoded.decode('utf-8')),
+            df = pd.read_csv(io.StringIO(decoded.decode('utf-8')),
                 index_col=0,
                 header=5,
                 skiprows=[6])
@@ -60,8 +63,7 @@ def parse_contents(contents, filename, date):
                 col_names[i + j] = '{}-{}'.format(col, suffix)
             df.columns = col_names
         else:
-            df = pd.read_csv(
-                io.StringIO(decoded.decode('utf-8')),
+            df = pd.read_csv(io.StringIO(decoded.decode('utf-8')),
                 index_col=0,
                 header=3,
                 skiprows=[4])
@@ -73,19 +75,46 @@ def parse_contents(contents, filename, date):
     # Zero time series based on first 5s
     df -= df[:5].mean()

-    data = []
+    ts = []
     for col in df.columns:
         trace = go.Scatter(x=df.index, y=df[col], name=col, opacity=0.8)
-        data.append(trace)
+        ts.append(trace)

     layout = dict(title=basename, xaxis=dict(rangeslider=dict()))
-    fig = dict(data=data, layout=layout)
+    timeseries = dict(data=ts, layout=layout)
+
+    # Specify wave statistics
+    var = [
+        'Hm0', 'Tm01', 'Tm02', 'Tm24', 'Tp', 'Ss', 'Sp', 'Ka', 'Tp1', 'alpha',
+        'eps2', 'eps4'
+    ]
+
+    spec = []
+    for col in df.columns:
+        t = df.index.values[:, np.newaxis]
+        x = df[[col]].values
+
+        # Get wave statistics
+        xx = wo.mat2timeseries(np.hstack([t, x]))
+        S = xx.tospecdata()
+        S.freqtype = 'f'
+        values, _, keys = S.characteristic(var)
+
+        # Plot energy spectrum
+        trace = go.Scatter(x=S.args, y=S.data, name=col, opacity=0.8)
+        spec.append(trace)
+
+    energy = dict(data=spec)
+
+    elements = html.Div([
+        dcc.Graph(id='time-series', figure=timeseries),
+        dcc.Graph(id='energy-spectrum', figure=energy)
+    ])

-    return html.Div([dcc.Graph(id='my-graph', figure=fig)])
+    return elements


-@app.callback(
-    Output('output-data-upload', 'children'), [
+@app.callback(Output('output-data-upload', 'children'), [
     Input('upload-data', 'contents'),
     Input('upload-data', 'filename'),
     Input('upload-data', 'last_modified')
@@ -102,7 +131,7 @@ def update_output(list_of_contents, list_of_names, list_of_dates):
 def main():
     port = 8050
     wb.open('http://localhost:{}'.format(port))
-    app.run_server(port=port, debug=False)
+    app.run_server(port=port, debug=True)


 if __name__ == '__main__':