@@ -1,5 +1,6 @@
 import io
 import os
+import json
 import base64
 import datetime
 import numpy as np
@@ -38,6 +39,7 @@ app.layout = html.Div([
         # Only upload one file
         multiple=False),
     html.Div(id='output-data-upload'),
+    html.Div(id='json-storage', style={'display': 'none'}),
 ])
@@ -70,7 +72,6 @@ def parse_contents(contents, filename):
                            skiprows=[4])
     except Exception as e:
-        print(e)
         return html.Div(['There was an error processing this file.'])
 
     # Zero time series based on first 5s
@@ -79,12 +80,31 @@ def parse_contents(contents, filename):
     # Round dataframe to save disk space
     df = df.round(1)
+
+    return df.to_json(orient='table')
+
+
+@app.callback(Output('json-storage', 'children'), [
+    Input('upload-data', 'contents'),
+    Input('upload-data', 'filename'),
+])
+def update_output(contents, name):
+    if contents is not None:
+        return parse_contents(contents, name)
+
+
+@app.callback(Output('output-data-upload', 'children'),
+              [Input('json-storage', 'children')])
+def read_json_data(json_data):
+    if json_data is None:
+        return html.Div([])
+
+    df = pd.read_json(json_data, orient='table')
 
     ts = []
     for col in df.columns:
         trace = go.Scatter(x=df.index, y=df[col], name=col, opacity=0.8)
         ts.append(trace)
 
-    layout = {'title': basename, 'xaxis': {'rangeslider': {}}}
+    layout = {'title': 'basename', 'xaxis': {'rangeslider': {}}}
     graph_timeseries = dcc.Graph(id='time-series',
                                  figure={
                                      'data': ts,
@@ -150,25 +170,15 @@ def parse_contents(contents, filename):
                                  },
                                  )
-    return graph_timeseries, graph_energy, table_energy
-
-
-@app.callback(Output('output-data-upload', 'children'), [
-    Input('upload-data', 'contents'),
-    Input('upload-data', 'filename'),
-])
-def update_output(contents, name):
-    if contents is not None:
-        graph_ts, graph_spec, table_spec = parse_contents(contents, name)
-        elements = html.Div([
-            graph_ts, graph_spec, table_spec,
-            html.Div(id='datatable-row-ids-container')
-        ])
-        return elements
-    #####
-    else:
-        return html.Div([])
+    elements = html.Div([
+        graph_timeseries, graph_energy, table_energy,
+        html.Div(id='datatable-row-ids-container')
+    ])
+    return elements
 
 
 def main():
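
For reference, below is a minimal, self-contained sketch of the pattern this diff introduces: the upload callback parses the file once and caches the result as JSON in the hidden 'json-storage' Div, and a second callback rebuilds the output from that cached JSON. This is an illustration only, not the app's actual code: parsing is reduced to a plain CSV read; the skiprows handling, zeroing step, graph_energy and table_energy are omitted; and the legacy dash_core_components / dash_html_components imports are assumed to match the Dash version this code targets.

import base64
import io

import dash
import dash_core_components as dcc
import dash_html_components as html
import pandas as pd
import plotly.graph_objs as go
from dash.dependencies import Input, Output

app = dash.Dash(__name__)

app.layout = html.Div([
    dcc.Upload(id='upload-data',
               children=html.Div(['Drag and drop or select a file']),
               # Only upload one file
               multiple=False),
    html.Div(id='output-data-upload'),
    # Hidden Div that holds the parsed DataFrame as JSON between callbacks
    html.Div(id='json-storage', style={'display': 'none'}),
])


@app.callback(Output('json-storage', 'children'), [
    Input('upload-data', 'contents'),
    Input('upload-data', 'filename'),
])
def update_output(contents, name):
    # Parse the uploaded file once and cache it as JSON in the hidden Div.
    if contents is None:
        return None
    # contents arrives as "data:<mime>;base64,<payload>"
    _, payload = contents.split(',', 1)
    df = pd.read_csv(io.StringIO(base64.b64decode(payload).decode('utf-8')))
    return df.round(1).to_json(orient='table')


@app.callback(Output('output-data-upload', 'children'),
              [Input('json-storage', 'children')])
def read_json_data(json_data):
    # Rebuild the time-series graph from the cached JSON.
    if json_data is None:
        return html.Div([])
    df = pd.read_json(json_data, orient='table')
    traces = [go.Scatter(x=df.index, y=df[col], name=col, opacity=0.8)
              for col in df.columns]
    return html.Div([
        dcc.Graph(id='time-series',
                  figure={'data': traces,
                          'layout': {'xaxis': {'rangeslider': {}}}}),
    ])


if __name__ == '__main__':
    app.run_server(debug=True)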