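"""daqviewer: a Dash app for inspecting csv data files in the browser.

An uploaded csv file is parsed into a dataframe and rendered as an
interactive time-series plot, an energy-spectrum plot (computed with wafo)
and a table of characteristic spectral statistics.

Note: written against the pre-2.0 Dash API, where dash_core_components,
dash_html_components and dash_table are separate packages.
"""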
import io
import os
import json
import base64
import datetime
import numpy as np
import pandas as pd
import webbrowser as wb

import dash
import dash_table
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output

import chart_studio.plotly as py
import plotly.graph_objs as go

import wafo.objects as wo

app = dash.Dash()
app.title = 'daqviewer'
app.scripts.config.serve_locally = True

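# Layout: an upload area limited to a single csv file, a container Div that
# receives the rendered graphs and table, and a hidden Div ('json-storage')
# used as the Dash "hidden div" pattern for passing the parsed data between
# callbacks.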
app.layout = html.Div([
    dcc.Upload(
        id='upload-data',
        children=html.Div(
            [html.A('Drag and drop csv files, or click to select.')]),
        style={
            'width': '99%',
            'height': '60px',
            'lineHeight': '60px',
            'borderWidth': '1px',
            'borderStyle': 'dashed',
            'borderRadius': '5px',
            'textAlign': 'center',
            'margin': '10px'
        },
        # Only upload one file
        multiple=False),
    html.Div(id='output-data-upload'),
    html.Div(id='json-storage', style={'display': 'none'}),
])

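# parse_contents decodes the base64-encoded upload and reads it into a
# dataframe. Files whose basename ends in '_WP' use a different csv header
# layout and have their columns relabelled with probe-location suffixes.
# The series are then zeroed against the mean of the first 5 s and rounded
# to one decimal to keep the stored JSON small.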
def parse_contents(contents, filename):
    basename, ext = os.path.splitext(filename)
    content_type, content_string = contents.split(',')
    decoded = base64.b64decode(content_string)

    # Check instrument type
    inst_type = basename.split('_')[-1]
    if inst_type == 'WP':
        df = pd.read_csv(io.StringIO(decoded.decode('utf-8')),
                         index_col=0,
                         header=5,
                         skiprows=[6])

        # Rename columns based on probe locations
        suffixes = ['P1', 'P2', 'P3', 'incident', 'reflected']
        col_names = list(df.columns)
        for i, col in enumerate(col_names[:-4]):
            if ('.' not in col) and (col_names[i + 4] == col + '.4'):
                for j, suffix in enumerate(suffixes):
                    col_names[i + j] = '{}-{}'.format(col, suffix)
        df.columns = col_names
    else:
        df = pd.read_csv(io.StringIO(decoded.decode('utf-8')),
                         index_col=0,
                         header=3,
                         skiprows=[4])

    # Zero time series based on first 5s
    df -= df[:5].mean()

    # Round dataframe to save disk space
    df = df.round(1)

    return df.to_json(orient='table')

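# Callback chain: the upload contents are parsed once and cached as JSON in
# the hidden 'json-storage' Div; a second callback reads that JSON back and
# builds everything shown under 'output-data-upload'.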
@app.callback(Output('json-storage', 'children'), [
    Input('upload-data', 'contents'),
    Input('upload-data', 'filename'),
])
def csv_to_json(contents, name):
    if contents is not None:
        return parse_contents(contents, name)

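# read_json_data rebuilds the dataframe from the cached JSON and renders a
# time-series graph, an energy-spectrum graph (wafo spectral estimates per
# column) and a DataTable of characteristic spectral statistics (Hm0, Tp,
# Tp1) per measurement location.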
@app.callback(Output('output-data-upload', 'children'),
              [Input('json-storage', 'children')])
def read_json_data(json_data):
    if json_data is None:
        return html.Div([])

    df = pd.read_json(json_data, orient='table')
    ts = []
    for col in df.columns:
        trace = go.Scatter(x=df.index, y=df[col], name=col, opacity=0.8)
        ts.append(trace)

    # Note: 'basename' is a literal placeholder title, not the uploaded
    # file's name.
    layout = {'title': 'basename', 'xaxis': {'rangeslider': {}}}
    graph_timeseries = dcc.Graph(id='time-series',
                                 figure={
                                     'data': ts,
                                     'layout': layout
                                 })

    # Create spectral dataframe
    dfs = pd.DataFrame(columns=range(15))

    spec = []
    for col in df.columns:
        t = df.index.values[:, np.newaxis]
        x = df[[col]].values

        # Get spectral statistics
        xx = wo.mat2timeseries(np.hstack([t, x]))
        S = xx.tospecdata()
        values, _, keys = S.characteristic(range(15))
        S.freqtype = 'f'

        # Update column names
        if dfs.columns[0] == 0:
            dfs.columns = keys

        # Add spectral values to dataframe
        dfs.loc[col, keys] = values

        # Plot energy spectrum
        trace = go.Scatter(x=S.args, y=S.data, name=col, opacity=0.8)
        spec.append(trace)

    graph_energy = dcc.Graph(id='energy-spectrum',
                             figure={
                                 'data': spec,
                                 'layout': {
                                     'xaxis': {
                                         'range': [0, 3]
                                     }
                                 }
                             })

    # Add location column
    dfs['location'] = dfs.index

    variables = {
        'location': '',
        'Hm0': '◊',
        'Tp': '□',
        'Tp1': 'o',
    }

    # Add table
    table_energy = dash_table.DataTable(
        id='datatable-row-ids',
        columns=[{
            'name': [val, key],
            'id': key
        } for key, val in variables.items()],
        data=dfs.to_dict('records'),
        editable=False,
        sort_action='native',
        sort_mode='multi',
        row_selectable='multi',
        row_deletable=False,
        selected_rows=np.arange(dfs.shape[0]),
        style_as_list_view=True,
        style_cell={
            'minWidth': '100px',
            'width': '100px',
            'maxWidth': '100px',
        },
    )

    elements = html.Div([
        graph_timeseries, graph_energy, table_energy,
        html.Div(id='datatable-row-ids-container')
    ])

    return elements

#####

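# Launch helper: open a browser tab at the app's local URL, then start the
# Dash development server. Caveat (assumption): with debug=True the Werkzeug
# reloader re-runs the script in a child process, so the browser tab may be
# opened twice.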
def main():
    port = 8050
    wb.open('http://localhost:{}'.format(port))
    app.run_server(port=port, debug=True)


if __name__ == '__main__':
    main()