Initial commit
commit
ec45a5864b
@ -0,0 +1 @@
|
||||
from .daqviewer import main
|
@ -0,0 +1,109 @@
|
||||
import io
|
||||
import os
|
||||
import base64
|
||||
import datetime
|
||||
import pandas as pd
|
||||
import webbrowser as wb
|
||||
|
||||
import dash
|
||||
from dash.dependencies import Input, Output
|
||||
import dash_core_components as dcc
|
||||
import dash_html_components as html
|
||||
|
||||
import plotly.plotly as py
|
||||
import plotly.graph_objs as go
|
||||
|
||||
app = dash.Dash()
app.scripts.config.serve_locally = True

# Dashed drop-target styling for the file-upload control.
_UPLOAD_STYLE = {
    'width': '100%',
    'height': '60px',
    'lineHeight': '60px',
    'borderWidth': '1px',
    'borderStyle': 'dashed',
    'borderRadius': '5px',
    'textAlign': 'center',
    'margin': '10px',
}

# Page layout: an upload drop zone on top, parsed output rendered below it.
app.layout = html.Div([
    dcc.Upload(
        id='upload-data',
        children=html.Div(
            ['Drag and Drop or ', html.A('Select Files')]),
        style=_UPLOAD_STYLE,
        # Allow multiple files to be uploaded
        multiple=True),
    html.Div(id='output-data-upload'),
])
|
||||
|
||||
|
||||
def parse_contents(contents, filename, date):
    """Decode one uploaded csv file and return a Dash graph of its traces.

    Parameters
    ----------
    contents : str
        Base64 data-URL payload from the dcc.Upload component
        ('<mime header>,<base64 string>').
    filename : str
        Original name of the uploaded file; the suffix after the last
        underscore selects the parsing layout.
    date : int
        Last-modified timestamp from the upload (currently unused).

    Returns
    -------
    dash_html_components.Div
        A Div wrapping the plotted time series, or an error message Div
        if the csv could not be parsed.
    """
    basename = os.path.splitext(filename)[0]
    mime_header, payload = contents.split(',')
    decoded = base64.b64decode(payload)

    # Check instrument type (suffix after the final underscore)
    inst_type = basename.split('_')[-1]
    try:
        if inst_type == 'WP':
            # Wave-probe files: header on row 5, units row 6 skipped.
            df = pd.read_csv(
                io.StringIO(decoded.decode('utf-8')),
                index_col=0,
                header=5,
                skiprows=[6])

            # Rename columns based on probe locations: a run of five
            # duplicate-named columns (col, col.1 .. col.4) is relabelled
            # with positional suffixes.
            suffixes = ['P1', 'P2', 'P3', 'incident', 'reflected']
            col_names = list(df.columns)
            for idx, name in enumerate(col_names[:-4]):
                if ('.' not in name) and (col_names[idx + 4] == name + '.4'):
                    for offset, tag in enumerate(suffixes):
                        col_names[idx + offset] = '{}-{}'.format(name, tag)
            df.columns = col_names
        else:
            # All other instruments: header on row 3, units row 4 skipped.
            df = pd.read_csv(
                io.StringIO(decoded.decode('utf-8')),
                index_col=0,
                header=3,
                skiprows=[4])

    except Exception as e:
        print(e)
        return html.Div(['There was an error processing this file.'])

    # Zero time series based on first 5s
    df -= df[:5].mean()

    data = [
        go.Scatter(x=df.index, y=df[col], name=col, opacity=0.8)
        for col in df.columns
    ]

    layout = dict(title=basename, xaxis=dict(rangeslider=dict()))
    fig = dict(data=data, layout=layout)

    return html.Div([dcc.Graph(id='my-graph', figure=fig)])
|
||||
|
||||
|
||||
@app.callback(
    Output('output-data-upload', 'children'),
    [Input('upload-data', 'contents'),
     Input('upload-data', 'filename'),
     Input('upload-data', 'last_modified')])
def update_output(list_of_contents, list_of_names, list_of_dates):
    """Render one parsed graph Div per uploaded file.

    Fires whenever the upload component changes; returns None (no update)
    until at least one file has been dropped.
    """
    if list_of_contents is None:
        return None
    return [
        parse_contents(contents, name, stamp)
        for contents, name, stamp in zip(
            list_of_contents, list_of_names, list_of_dates)
    ]
|
||||
|
||||
|
||||
def main(port=8050):
    """Serve the daqviewer Dash app and open it in the default browser.

    Parameters
    ----------
    port : int, optional
        TCP port to serve on (default 8050, Dash's standard port).
    """
    # Open the browser first; the tab finishes loading once run_server()
    # below starts accepting connections.
    wb.open('http://localhost:{}'.format(port))
    app.run_server(port=port, debug=False)
|
||||
|
||||
|
||||
# Allow running the module directly, in addition to the console-script
# entry point declared in setup.py.
if __name__ == '__main__':
    main()
|
@ -0,0 +1,13 @@
|
||||
import setuptools


# Packaging metadata; installs a `daqviewer` console script that launches
# the Dash viewer.
setuptools.setup(
    name='daqviewer',
    version='0.1.0',
    author='Dan Howe',
    author_email='d.howe@wrl.unsw.edu.au',
    description='Visualise DAQ csv files',
    packages=setuptools.find_packages(),
    entry_points={
        'console_scripts': ['daqviewer = daqviewer:main'],
    },
)
|
Loading…
Reference in New Issue