You cannot select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

2338 lines
83 KiB
Plaintext

This file contains ambiguous Unicode characters!

This file contains ambiguous Unicode characters that may be confused with others in your current locale. If your use case is intentional and legitimate, you can safely ignore this warning. Use the Escape button to highlight these characters.

{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Data exploration\n",
"This notebook provides an example of how the data has been loaded and accessed for further analysis."
]
},
{
"cell_type": "markdown",
"metadata": {
"heading_collapsed": true
},
"source": [
"## Setup notebook"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-11T22:16:57.761856Z",
"start_time": "2018-12-11T22:16:55.841860Z"
},
"hidden": true
},
"outputs": [],
"source": [
"# Enable autoreloading of our modules. \n",
"# Most of the code will be located in the /src/ folder, \n",
"# and then called from the notebook.\n",
"%matplotlib inline\n",
"%reload_ext autoreload\n",
"%autoreload"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-11T22:17:15.644076Z",
"start_time": "2018-12-11T22:16:57.763862Z"
},
"hidden": true,
"scrolled": true
},
"outputs": [],
"source": [
"from IPython.core.debugger import set_trace\n",
"\n",
"import pandas as pd\n",
"import numpy as np\n",
"import os\n",
"\n",
"import plotly\n",
"import plotly.graph_objs as go\n",
"import plotly.plotly as py\n",
"import plotly.tools as tls\n",
"import plotly.figure_factory as ff\n",
"import plotly.io as pio\n",
"\n",
"\n",
"import matplotlib\n",
"from matplotlib import cm\n",
"import colorlover as cl\n",
"\n",
"from ipywidgets import widgets, Output\n",
"from IPython.display import display, clear_output, Image, HTML\n",
"\n",
"from sklearn.metrics import confusion_matrix"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Import data\n",
"Import our data into pandas DataFrames for the analysis. Data files are `.csv` files which are stored in the `./data/interim/` folder."
]
},
{
"cell_type": "code",
"execution_count": 56,
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-13T02:08:52.148080Z",
"start_time": "2018-12-13T02:08:29.576649Z"
},
"pixiedust": {
"displayParams": {}
},
"scrolled": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Importing waves.csv\n",
"Importing tides.csv\n",
"Importing profiles.csv\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\z5189959\\Desktop\\nsw-2016-storm-impact\\.venv\\lib\\site-packages\\numpy\\lib\\arraysetops.py:522: FutureWarning:\n",
"\n",
"elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison\n",
"\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Importing sites.csv\n",
"Importing profile_features.csv\n",
"Importing impacts_forecasted_foreshore_slope_sto06.csv\n",
"Importing impacts_forecasted_mean_slope_sto06.csv\n",
"Importing impacts_observed.csv\n",
"Importing twl_foreshore_slope_sto06.csv\n",
"Importing twl_mean_slope_sto06.csv\n",
"Done!\n"
]
}
],
"source": [
"def df_from_csv(csv, index_col, data_folder='../data/interim'):\n",
" print('Importing {}'.format(csv))\n",
" return pd.read_csv(os.path.join(data_folder,csv), index_col=index_col)\n",
"\n",
"df_waves = df_from_csv('waves.csv', index_col=[0, 1])\n",
"df_tides = df_from_csv('tides.csv', index_col=[0, 1])\n",
"df_profiles = df_from_csv('profiles.csv', index_col=[0, 1, 2])\n",
"df_sites = df_from_csv('sites.csv', index_col=[0])\n",
"df_profile_features = df_from_csv('profile_features.csv', index_col=[0])\n",
"\n",
"# Note that the forecasted data sets should be in the same order for impacts and twls\n",
"impacts = {\n",
" 'forecasted': {\n",
" 'foreshore_slope_sto06': df_from_csv('impacts_forecasted_foreshore_slope_sto06.csv', index_col=[0]),\n",
" 'mean_slope_sto06': df_from_csv('impacts_forecasted_mean_slope_sto06.csv', index_col=[0]),\n",
" },\n",
" 'observed': df_from_csv('impacts_observed.csv', index_col=[0])\n",
" }\n",
"\n",
"\n",
"twls = {\n",
" 'forecasted': {\n",
" 'foreshore_slope_sto06': df_from_csv('twl_foreshore_slope_sto06.csv', index_col=[0, 1]),\n",
" 'mean_slope_sto06':df_from_csv('twl_mean_slope_sto06.csv', index_col=[0, 1]),\n",
" }\n",
"}\n",
"print('Done!')"
]
},
{
"cell_type": "markdown",
"metadata": {
"ExecuteTime": {
"end_time": "2018-11-27T23:02:57.631306Z",
"start_time": "2018-11-27T23:02:57.615263Z"
}
},
"source": [
"## Profile/timeseries dashboard"
]
},
{
"cell_type": "markdown",
"metadata": {
"hide_input": true
},
"source": [
"The following interactive data explorer displays information on a per `site_id` basis. It can be used to examine pre/post storm cross-sections, water level time series and observed/predicted storm impacts."
]
},
{
"cell_type": "code",
"execution_count": 79,
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-13T22:22:40.283477Z",
"start_time": "2018-12-13T22:22:39.430903Z"
},
"code_folding": [
408
],
"hide_input": false
},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "47ba792667fd42f9b19c2fa574d8667c",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"VBox(children=(VBox(children=(HTML(value='<b>Filter by observed and predicted impacts:</b>'), HBox(children=(V…"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Create widgets for filtering by observed and forecasted impacts\n",
"filter_title = widgets.HTML(\n",
" value=\"<b>Filter by observed and predicted impacts:</b>\", )\n",
"\n",
"titles = ['Observed Impacts']\n",
"selectboxes = [\n",
" widgets.SelectMultiple(\n",
" options=impacts['observed'].storm_regime.dropna().unique().tolist(),\n",
" value=impacts['observed'].storm_regime.dropna().unique().tolist(),\n",
" disabled=False)\n",
"]\n",
"\n",
"# Iterate through each of our forecasted impacts\n",
"for forecast in impacts['forecasted']:\n",
" selectboxes.append(\n",
" widgets.SelectMultiple(\n",
" options=impacts['forecasted'][forecast].storm_regime.dropna().\n",
" unique().tolist(),\n",
" value=impacts['forecasted'][forecast].storm_regime.dropna().\n",
" unique().tolist(),\n",
" disabled=False))\n",
" titles.append('Forecasted: {}'.format(forecast))\n",
"\n",
"titles = [widgets.HTML(value=title) for title in titles]\n",
"\n",
"children = widgets.HBox(children=[\n",
" widgets.VBox(children=[title, box])\n",
" for title, box in zip(titles, selectboxes)\n",
"])\n",
"filter_container = widgets.VBox(children=[filter_title, children])\n",
"\n",
"# Create widgets for selecting site_id\n",
"site_id_title = widgets.HTML(value=\"<b>Filter by site_id:</b>\", )\n",
"\n",
"site_id_select = widgets.Dropdown(\n",
" description='site_id: ',\n",
" value='NARRA0001',\n",
"    # NOTE: previously options=df_no_crests.index.tolist(), but df_no_crests\n",
"    # is only defined in a later cell, so this cell failed on a fresh\n",
"    # kernel (Restart & Run All). Use the full site list instead.\n",
"    options=df_profiles.index.get_level_values('site_id').unique().\n",
"    sort_values().tolist(),\n",
")\n",
"\n",
"site_id_impacts = widgets.HTML(value=\"\", )\n",
"\n",
"site_id_container = widgets.HBox(children=[\n",
" widgets.VBox(\n",
" children=[site_id_title,\n",
" widgets.HBox(children=[site_id_select])]), site_id_impacts\n",
"])\n",
"\n",
"# Build colors for each of our forecasts\n",
"colors = list(\n",
" reversed(cl.scales[str(max(len(impacts['forecasted']),\n",
" 3))]['seq']['YlGnBu']))\n",
"\n",
"# Add panel for pre/post storm profiles\n",
"trace1 = go.Scatter(\n",
" x=[0],\n",
" y=[0],\n",
" name='Pre Storm Profile',\n",
" line=dict(color=('rgb(51,160,44)'), width=2))\n",
"trace2 = go.Scatter(\n",
" x=[0],\n",
" y=[0],\n",
" name='Post Storm Profile',\n",
" line=dict(color=('rgb(255,127,0)'), width=2))\n",
"trace3 = go.Scatter(\n",
" x=[0],\n",
" y=[0],\n",
" name='Pre-storm dune crest',\n",
" mode='markers',\n",
" marker=dict(\n",
" color='rgba(255,255,255,0)',\n",
" size=10,\n",
" line=dict(color='rgba(106,61,154, 1)', width=2)),\n",
")\n",
"trace4 = go.Scatter(\n",
" x=[0],\n",
" y=[0],\n",
" name='Pre-storm dune toe',\n",
" mode='markers',\n",
" marker=dict(\n",
" color='rgba(255,255,255,0)',\n",
" size=10,\n",
" line=dict(color='rgba(202,178,214,1)', width=2)),\n",
")\n",
"\n",
"trace5 = go.Scatter(\n",
" x=[0],\n",
" y=[0],\n",
" name='Post-storm dune crest',\n",
" mode='markers',\n",
" marker=dict(\n",
" color='rgba(255,255,255,0)',\n",
" size=10,\n",
" line=dict(color='rgba(106,61,154, 1)', width=2),\n",
" symbol='square'),\n",
")\n",
"trace6 = go.Scatter(\n",
" x=[0],\n",
" y=[0],\n",
" name='Post-storm dune toe',\n",
" mode='markers',\n",
" marker=dict(\n",
" color='rgba(255,255,255,0)',\n",
" size=10,\n",
" line=dict(color='rgba(202,178,214,1)', width=2),\n",
" symbol='square'),\n",
")\n",
"\n",
"forecast_traces = []\n",
"for forecast, color in zip(impacts['forecasted'], colors):\n",
" forecast_traces.append(\n",
" go.Scatter(\n",
" x=[0],\n",
" y=[0],\n",
" name='Peak R_high: {}'.format(forecast),\n",
" mode='lines',\n",
" line=dict(\n",
" color=color,\n",
" width=4,\n",
" )))\n",
"\n",
"layout = go.Layout(\n",
" title='Bed Profiles',\n",
" height=300,\n",
" legend=dict(font={'size': 10}),\n",
" margin=dict(t=50, b=50, l=50, r=20),\n",
" xaxis=dict(\n",
" title='x (m)',\n",
" autorange=True,\n",
" showgrid=True,\n",
" zeroline=True,\n",
" showline=True,\n",
" range=[0, 200]),\n",
" yaxis=dict(\n",
" title='z (m)',\n",
" autorange=False,\n",
" showgrid=True,\n",
" zeroline=True,\n",
" showline=True,\n",
" range=[-1, 20]))\n",
"\n",
"g_profiles = go.FigureWidget(\n",
" data=[trace1, trace2, trace3, trace4, trace5, trace6] + forecast_traces,\n",
" layout=layout)\n",
"\n",
"# Add panel for google maps\n",
"# NOTE: this is a Mapbox *public* (pk.) access token, intended for\n",
"# client-side embedding; still, prefer loading it from an environment\n",
"# variable (e.g. os.environ['MAPBOX_ACCESS_TOKEN']) over hardcoding.\n",
"mapbox_access_token = 'pk.eyJ1IjoiY2hyaXNsZWFtYW4iLCJhIjoiY2pvNTY1MzZpMDc2OTN2bmw5MGsycHp5bCJ9.U2dwFg2c7RFjUNSayERUiw'\n",
"\n",
"data = [\n",
" go.Scattermapbox(\n",
" lat=df_sites['lat'],\n",
" lon=df_sites['lon'],\n",
" mode='markers',\n",
" marker=dict(size=10),\n",
" text=df_sites.index.get_level_values('site_id'),\n",
" ),\n",
" go.Scattermapbox(\n",
" lat=[0],\n",
" lon=[0],\n",
" mode='markers',\n",
" marker=dict(\n",
" size=20,\n",
" color='rgb(255, 0, 0)',\n",
" opacity=0.5,\n",
" ),\n",
" text=df_sites.index.get_level_values('site_id'),\n",
" ),\n",
"]\n",
"\n",
"layout = go.Layout(\n",
" autosize=True,\n",
" height=300,\n",
" hovermode='closest',\n",
" showlegend=False,\n",
" margin=dict(t=50, b=50, l=20, r=20),\n",
" mapbox=dict(\n",
" accesstoken=mapbox_access_token,\n",
" bearing=0,\n",
" center=dict(lat=-33.7, lon=151.3),\n",
" pitch=0,\n",
" zoom=12,\n",
" style='satellite-streets'),\n",
")\n",
"\n",
"fig = dict(data=data, layout=layout)\n",
"g_map = go.FigureWidget(data=data, layout=layout)\n",
"\n",
"subplot = tls.make_subplots(3, 1, print_grid=False, shared_xaxes=True)\n",
"g_timeseries = go.FigureWidget(subplot)\n",
"\n",
"# Add trace for Hs0\n",
"g_timeseries.add_trace(\n",
" go.Scatter(\n",
" x=[0, 1],\n",
" y=[0, 1],\n",
" name='Hs0',\n",
" ),\n",
" row=3,\n",
" col=1,\n",
")\n",
"\n",
"# Add trace for Tp\n",
"g_timeseries.add_trace(\n",
" go.Scatter(\n",
" x=[0, 1],\n",
" y=[0, 1],\n",
" name='Tp',\n",
" ),\n",
" row=3,\n",
" col=1,\n",
")\n",
"\n",
"# Add water levels\n",
"g_timeseries.add_trace(\n",
" go.Scatter(\n",
" x=[0, 3],\n",
" y=[0, 3],\n",
" name='Dune Crest',\n",
" mode='lines',\n",
" line=dict(color=('rgb(214, 117, 14)'), width=2, dash='dot')),\n",
" row=1,\n",
" col=1)\n",
"\n",
"g_timeseries.add_trace(\n",
" go.Scatter(\n",
" x=[0, 3],\n",
" y=[0, 3],\n",
" name='Dune Toe',\n",
" mode='lines',\n",
" line=dict(color=('rgb(142, 77, 8)'), width=2, dash='dash')),\n",
" row=1,\n",
" col=1)\n",
"\n",
"g_timeseries.add_trace(\n",
" go.Scatter(\n",
" x=[0, 3],\n",
" y=[0, 3],\n",
" name='Tide+Surge WL',\n",
" line=dict(color=('rgb(8,51,137)'), width=2, dash='dot')),\n",
" row=1,\n",
" col=1)\n",
"\n",
"for forecast, color in zip(twls['forecasted'], colors):\n",
" g_timeseries.add_trace(\n",
" go.Scatter(\n",
" x=[0],\n",
" y=[0],\n",
" name='R_high: {}'.format(forecast),\n",
" line=dict(color=color, width=3)),\n",
" row=1,\n",
" col=1)\n",
"\n",
"# Add trace for each forecasted beta term\n",
"for forecast, color in zip(impacts['forecasted'], colors):\n",
" g_timeseries.add_trace(\n",
" go.Scatter(\n",
" x=[0, 1],\n",
" y=[0, 1],\n",
" name='Beta: {}'.format(forecast),\n",
" line=dict(color=color, width=3)),\n",
" row=2,\n",
" col=1,\n",
" )\n",
"\n",
"# Create axis for Tp on same plot as Hs\n",
"g_timeseries['layout']['yaxis4'] = {'overlaying': 'y3', 'side': 'right'}\n",
"g_timeseries.data[1]['yaxis'] = 'y4'\n",
"\n",
"# Add labels to each axis\n",
"g_timeseries.layout['xaxis']['title'] = 'datetime'\n",
"g_timeseries.layout['yaxis1']['title'] = 'z (mAHD)'\n",
"g_timeseries.layout['yaxis2']['title'] = 'beta (-)'\n",
"g_timeseries.layout['yaxis3']['title'] = 'Hs0 (m)'\n",
"g_timeseries.layout['yaxis4']['title'] = 'Tp (s)'\n",
"\n",
"# Update figure size\n",
"g_timeseries['layout'].update(height=400, legend=dict(font={'size': 10}))\n",
"g_timeseries['layout'].update(margin=dict(t=20, l=50, r=20, b=100))\n",
"\n",
"# Add panel for some tables\n",
"titles = ['observed'] + [forecast for forecast in impacts['forecasted']]\n",
"titles = [widgets.HTML(value=\"{}\".format(title)) for title in titles]\n",
"\n",
"\n",
"def get_observed_impacts_table(site_id):\n",
" display(impacts['observed'].query(\"site_id=='{}'\".format(site_id)).T)\n",
"\n",
"\n",
"def get_forecasted_impacts_table(site_id, forecast):\n",
" display(impacts['forecasted'][forecast].query(\n",
" \"site_id=='{}'\".format(site_id)).T)\n",
"\n",
"\n",
"impacts_table_observed = widgets.interactive_output(\n",
" get_observed_impacts_table, {'site_id': site_id_select})\n",
"forecasted_impacts_tables = []\n",
"for forecast, title in zip(impacts['forecasted'], titles[1:]):\n",
" forecasted_impacts_tables.append(\n",
" widgets.interactive_output(get_forecasted_impacts_table, {\n",
" 'site_id': site_id_select,\n",
" 'forecast': title\n",
" }))\n",
"\n",
"tables = [impacts_table_observed] + forecasted_impacts_tables\n",
"\n",
"title_tables = [\n",
" widgets.VBox(children=[title, table])\n",
" for title, table in zip(titles, tables)\n",
"]\n",
"\n",
"tables_container = widgets.HBox(children=[*title_tables])\n",
"\n",
"\n",
"def update_profile(change):\n",
"\n",
" site_id = site_id_select.value\n",
"\n",
" if site_id is None:\n",
" return\n",
"\n",
" site_profile = df_profiles.query('site_id == \"{}\"'.format(site_id))\n",
" prestorm_profile = site_profile.query('profile_type == \"prestorm\"')\n",
" poststorm_profile = site_profile.query('profile_type == \"poststorm\"')\n",
"\n",
" poststorm_x = poststorm_profile.index.get_level_values('x').tolist()\n",
" poststorm_z = poststorm_profile.z.tolist()\n",
"\n",
" prestorm_x = prestorm_profile.index.get_level_values('x').tolist()\n",
" prestorm_z = prestorm_profile.z.tolist()\n",
"\n",
" prestorm_site_features = df_profile_features.query(\n",
" 'site_id == \"{}\" and profile_type==\"prestorm\"'.format(site_id))\n",
" prestorm_dune_crest_x = prestorm_site_features.dune_crest_x\n",
" prestorm_dune_crest_z = prestorm_site_features.dune_crest_z\n",
" prestorm_dune_toe_x = prestorm_site_features.dune_toe_x\n",
" prestorm_dune_toe_z = prestorm_site_features.dune_toe_z\n",
"\n",
" poststorm_site_features = df_profile_features.query(\n",
" 'site_id == \"{}\" and profile_type==\"poststorm\"'.format(site_id))\n",
" poststorm_dune_crest_x = poststorm_site_features.dune_crest_x\n",
" poststorm_dune_crest_z = poststorm_site_features.dune_crest_z\n",
" poststorm_dune_toe_x = poststorm_site_features.dune_toe_x\n",
" poststorm_dune_toe_z = poststorm_site_features.dune_toe_z\n",
"\n",
" # Update beach profile section plots\n",
" with g_profiles.batch_update():\n",
" g_profiles.data[0].x = prestorm_x\n",
" g_profiles.data[0].y = prestorm_z\n",
" g_profiles.data[1].x = poststorm_x\n",
" g_profiles.data[1].y = poststorm_z\n",
" g_profiles.data[2].x = prestorm_dune_crest_x\n",
" g_profiles.data[2].y = prestorm_dune_crest_z\n",
" g_profiles.data[3].x = prestorm_dune_toe_x\n",
" g_profiles.data[3].y = prestorm_dune_toe_z\n",
" g_profiles.data[4].x = poststorm_dune_crest_x\n",
" g_profiles.data[4].y = poststorm_dune_crest_z\n",
" g_profiles.data[5].x = poststorm_dune_toe_x\n",
" g_profiles.data[5].y = poststorm_dune_toe_z\n",
"\n",
" for n, forecast in enumerate(impacts['forecasted']):\n",
" R_high = max(impacts['forecasted'][forecast].query(\n",
" \"site_id=='{}'\".format(site_id)).R_high)\n",
" g_profiles.data[6 + n].x = [200, 400]\n",
" g_profiles.data[6 + n].y = [R_high, R_high]\n",
"\n",
" # Relocate plan of satellite imagery\n",
" site_coords = df_sites.query('site_id == \"{}\"'.format(site_id))\n",
" with g_map.batch_update():\n",
" g_map.layout.mapbox['center'] = {\n",
" 'lat': site_coords['lat'].values[0],\n",
" 'lon': site_coords['lon'].values[0]\n",
" }\n",
" g_map.layout.mapbox['zoom'] = 15\n",
" g_map.data[1].lat = [site_coords['lat'].values[0]]\n",
" g_map.data[1].lon = [site_coords['lon'].values[0]]\n",
" g_map.data[1].text = site_coords['lon'].index.get_level_values(\n",
" 'site_id').tolist()\n",
"\n",
" # Update time series plots\n",
" df_waves_site = df_waves.query(\"site_id=='{}'\".format(site_id))\n",
" times = df_waves_site.index.get_level_values('datetime').tolist()\n",
" Hs0s = df_waves_site.Hs0.tolist()\n",
" Tps = df_waves_site.Tp.tolist()\n",
"\n",
" df_tide_site = df_tides.query(\"site_id=='{}'\".format(site_id))\n",
" mask = (df_tide_site.index.get_level_values('datetime') >= min(times)) & (\n",
" df_tide_site.index.get_level_values('datetime') <= max(times))\n",
" df_tide_site = df_tide_site.loc[mask]\n",
"\n",
" with g_timeseries.batch_update():\n",
" g_timeseries.data[0].x = times\n",
" g_timeseries.data[0].y = Hs0s\n",
" g_timeseries.data[1].x = times\n",
" g_timeseries.data[1].y = Tps\n",
"\n",
" # Update beta values\n",
" idx_betas = [\n",
" n for n, x in enumerate(g_timeseries.data) if 'Beta' in x.name\n",
" ]\n",
" for i, forecast in zip(idx_betas, twls['forecasted']):\n",
" df_twl = twls['forecasted'][forecast].query(\n",
" \"site_id=='{}'\".format(site_id))\n",
"            # Use a distinct name: the wave-timeseries `times` is still\n",
"            # needed below for the dune crest/toe x-range and must not\n",
"            # be clobbered by this loop.\n",
"            twl_times = df_twl.index.get_level_values('datetime').tolist()\n",
"            beta = df_twl.beta.tolist()\n",
"            g_timeseries.data[i].x = twl_times\n",
" g_timeseries.data[i].y = beta\n",
"\n",
" g_timeseries.data[2].x = [min(times), max(times)]\n",
" g_timeseries.data[3].x = [min(times), max(times)]\n",
" g_timeseries.data[4].x = df_tide_site.index.get_level_values(\n",
" 'datetime')\n",
" g_timeseries.data[2].y = prestorm_dune_crest_z.tolist(\n",
" )[0], prestorm_dune_crest_z.tolist()[0],\n",
" g_timeseries.data[3].y = prestorm_dune_toe_z.tolist(\n",
" )[0], prestorm_dune_toe_z.tolist()[0],\n",
" g_timeseries.data[4].y = df_tide_site.tide.tolist()\n",
"\n",
" # Update rhigh values\n",
"        idx_rhighs = [\n",
"            n for n, x in enumerate(g_timeseries.data) if 'R_high' in x.name\n",
"        ]\n",
"        for i, forecast in zip(idx_rhighs, twls['forecasted']):\n",
" df_twl = twls['forecasted'][forecast].query(\n",
" \"site_id=='{}'\".format(site_id))\n",
" times = df_twl.index.get_level_values('datetime').tolist()\n",
" R_high = df_twl.R_high.tolist()\n",
" g_timeseries.data[i].x = times\n",
" g_timeseries.data[i].y = R_high\n",
"\n",
" # Update site id impacts\n",
" observed_regime = impacts['observed'].query(\n",
" \"site_id=='{}'\".format(site_id)).storm_regime.values[0]\n",
" site_id_impacts.value = \"Observed: <b>{}</b><br>\".format(observed_regime)\n",
"\n",
" for forecast in impacts['forecasted']:\n",
" regime = impacts['forecasted'][forecast].query(\n",
" \"site_id=='{}'\".format(site_id)).storm_regime.values[0]\n",
" site_id_impacts.value += '{}: <b>{}</b><br>'.format(forecast, regime)\n",
"\n",
"\n",
"site_id_select.observe(update_profile, names=\"value\")\n",
"\n",
"\n",
"def update_filter(change):\n",
"\n",
" # Iterate through each box, only keeping site_ids which are not filtered out by each box\n",
" valid_site_ids = impacts['observed'].index.tolist()\n",
" dfs = [impacts['observed']\n",
" ] + [impacts['forecasted'][key] for key in impacts['forecasted']]\n",
"\n",
" for box, df in zip(selectboxes, dfs):\n",
" valid_site_ids = list(\n",
" set(valid_site_ids).intersection(\n",
" set(df[df.storm_regime.isin(box.value)].index.tolist())))\n",
" site_id_select.options = sorted(valid_site_ids)\n",
"\n",
" # TODO Update options in selectboxes with number of observations?\n",
"\n",
"\n",
"# Update the filter if any of the boxes changes\n",
"for box in selectboxes:\n",
" box.observe(update_filter, names=\"value\")\n",
"\n",
"# Display our widgets!\n",
"widgets.VBox([\n",
" filter_container, site_id_container,\n",
" widgets.HBox([g_profiles, g_map]), g_timeseries, tables_container\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2018-11-27T23:06:31.686277Z",
"start_time": "2018-11-27T23:06:31.665206Z"
}
},
"outputs": [],
"source": []
},
{
"cell_type": "markdown",
"metadata": {
"ExecuteTime": {
"end_time": "2018-11-22T22:52:36.039701Z",
"start_time": "2018-11-22T22:52:36.035189Z"
},
"heading_collapsed": true,
"hide_input": true,
"scrolled": true
},
"source": [
"## Confusion matrix\n",
"This visualization looks at how well the storm impact predictions performed. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-11T22:41:46.514495Z",
"start_time": "2018-12-11T22:41:27.050029Z"
},
"code_folding": [],
"hidden": true,
"hide_input": false,
"scrolled": false
},
"outputs": [],
"source": [
"# Create colorscale\n",
"rdylgr_cmap = matplotlib.cm.get_cmap('RdYlGn')\n",
"\n",
"norm = matplotlib.colors.Normalize(vmin=0, vmax=255)\n",
"\n",
"def matplotlib_to_plotly(cmap, pl_entries):\n",
" h = 1.0/(pl_entries-1)\n",
" pl_colorscale = []\n",
"\n",
" for k in range(pl_entries):\n",
" C = list(map(np.uint8, np.array(cmap(k*h)[:3])*255))\n",
" pl_colorscale.append([k*h, 'rgb'+str((C[0], C[1], C[2]))])\n",
"\n",
" return pl_colorscale\n",
"\n",
"rdylgr = matplotlib_to_plotly(rdylgr_cmap, 255)\n",
"\n",
"\n",
"\n",
"# Create widget for list of beaches.\n",
"beaches = df_sites.beach.unique().tolist()\n",
"\n",
"beach_title = widgets.HTML(value=\"<b>Filter by beach:</b>\", )\n",
"\n",
"beach_select = widgets.SelectMultiple(\n",
" options=beaches, value=beaches, disabled=False)\n",
"\n",
"beach_container = widgets.VBox([beach_title, beach_select])\n",
"\n",
"# Create confusion matrix for each forecasted impact data set\n",
"heatmaps = []\n",
"for forecast in impacts['forecasted']:\n",
"\n",
" z = [[.1, .3, .5, 2], [1.0, .8, .6, 1], [1.4, .28, 1.6, .21],\n",
" [.6, .4, .2, 3]]\n",
"\n",
" x = ['swash', 'collision', 'overwash', 'inundation']\n",
" y = list(reversed(x))\n",
"\n",
" z_text = z\n",
"\n",
" fig = ff.create_annotated_heatmap(z, x=x, y=y, annotation_text=z_text, colorscale=rdylgr)\n",
" heatmap = go.FigureWidget(data=fig.data, layout=fig.layout)\n",
"\n",
" heatmap.layout.update(\n",
" height=300, margin=go.layout.Margin(l=100, r=100, b=40, t=40, pad=0))\n",
" heatmap.layout.xaxis.update(title='Predicted')\n",
" heatmap.layout.yaxis.update(title='Observed')\n",
" heatmap_title = widgets.HTML(value=\"<b>{}</b>\".format(forecast) )\n",
" heatmaps.append(widgets.VBox([heatmap_title, heatmap]))\n",
"\n",
" \n",
"def update_heatmaps(change):\n",
" \n",
" for forecast, heatmap in zip(impacts['forecasted'], heatmaps):\n",
" selected_site_ids = df_sites[df_sites.beach.isin(beach_select.value)].index.tolist()\n",
"\n",
" df_ob = impacts['observed']\n",
" df_fo = impacts['forecasted'][forecast]\n",
"\n",
" observed_regimes = df_ob[df_ob.index.isin(selected_site_ids)].storm_regime.dropna().rename(\"observed_regime\")\n",
" forecasted_regimes = df_fo[df_fo.index.isin(selected_site_ids)].storm_regime.dropna().rename(\"forecasted_regime\")\n",
"\n",
" if any([observed_regimes.empty, forecasted_regimes.empty]):\n",
" return\n",
" \n",
" df_compare = pd.concat([observed_regimes, forecasted_regimes], axis='columns', names=['a','b'], sort=True)\n",
" df_compare.dropna(axis='index',inplace=True)\n",
"\n",
" z = confusion_matrix(df_compare.observed_regime.tolist(), df_compare.forecasted_regime.tolist(), labels = ['swash','collision','overwash','inundation'])\n",
" z = np.flip(z,axis=0)\n",
" z_list = list(reversed(z.tolist()))\n",
" \n",
" # Make incorrect values negative, so they get assigned a different color.\n",
" # Better for visualization\n",
" z_neg_incorrect = np.flip(np.identity(4),axis=0)\n",
" z_neg_incorrect[z_neg_incorrect==0]= -1\n",
" z_neg_incorrect = (z * z_neg_incorrect).tolist()\n",
" \n",
" # Also want to display percentages\n",
" z_with_pct = []\n",
" for row in z:\n",
" new_row = []\n",
" for val in row:\n",
" new_row.append('{}<br>({}%)'.format(val, np.around(val/np.sum(z)*100,1)))\n",
" z_with_pct.append(new_row)\n",
" \n",
" fig = ff.create_annotated_heatmap(z_neg_incorrect, x=x, y=y, annotation_text=z_with_pct)\n",
" heatmap.children[1].data[0].z = z_neg_incorrect\n",
" heatmap.children[1].layout.annotations = fig.layout.annotations\n",
"\n",
"# Hook changes to beach filter to update confusion heatmaps\n",
"beach_select.observe(update_heatmaps, names=\"value\")\n",
"\n",
"# Display our widgets\n",
"widgets.VBox([beach_container, widgets.VBox(heatmaps)])\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-11T22:17:29.215527Z",
"start_time": "2018-12-11T22:16:56.023Z"
},
"hidden": true
},
"outputs": [],
"source": [
"# To output to file\n",
"# fig = heatmaps[1].children[1]\n",
"# img_bytes = pio.write_image(fig, 'fig1.png',format='png', width=600, height=400, scale=5)\n",
"\n",
"# fig = g_profiles\n",
"# img_bytes = pio.write_image(fig, 'fig1.png',format='png', width=600, height=200, scale=5)\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-03T23:02:47.179180Z",
"start_time": "2018-12-03T23:02:46.367273Z"
}
},
"source": [
"## Identify sites with no results"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Check forecast TWL\n",
"The most probable explanation for TWLs which are NaN'ed is that the prestorm dune toe is not defined."
]
},
{
"cell_type": "code",
"execution_count": 57,
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-13T02:09:10.914191Z",
"start_time": "2018-12-13T02:09:09.913622Z"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The following sites have no slope defined in the twl csv file:\n",
"['ENTRA0078', 'ENTRA0079', 'MANNING0109']\n",
"\n",
"The following sites have no R_high defined in the twl csv file:\n",
"['ENTRA0078', 'ENTRA0079', 'MANNING0109']\n"
]
}
],
"source": [
"df_twls = twls['forecasted']['mean_slope_sto06']\n",
"\n",
"slope_mask = df_twls.groupby('site_id').agg({'beta': lambda x: x.isnull().sum() == len(x)}).beta\n",
"print('The following sites have no slope defined in the twl csv file:')\n",
"print(slope_mask.index[slope_mask].tolist())\n",
"print()\n",
"\n",
"R_high_mask = df_twls.groupby('site_id').agg({'R_high': lambda x: x.isnull().sum() == len(x)}).R_high\n",
"print('The following sites have no R_high defined in the twl csv file:')\n",
"print(R_high_mask.index[R_high_mask].tolist())\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Check observed impacts\n",
"Find sites which have no observed impacts. If we do not identify an observed storm regime, the site cannot be included when we're trying to compare predicted and observed impacts."
]
},
{
"cell_type": "code",
"execution_count": 58,
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-13T02:09:14.785919Z",
"start_time": "2018-12-13T02:09:14.520986Z"
},
"scrolled": false
},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>prestorm_swash_vol</th>\n",
" <th>poststorm_swash_vol</th>\n",
" <th>swash_vol_change</th>\n",
" <th>swash_pct_change</th>\n",
" <th>prestorm_dune_face_vol</th>\n",
" <th>poststorm_dune_face_vol</th>\n",
" <th>dune_face_vol_change</th>\n",
" <th>dune_face_pct_change</th>\n",
" <th>storm_regime</th>\n",
" </tr>\n",
" <tr>\n",
" <th>site_id</th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>AVOCAn0009</th>\n",
" <td>4.5783</td>\n",
" <td>0.1110</td>\n",
" <td>4.4673</td>\n",
" <td>97.5750</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>AVOCAs0001</th>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>AVOCAs0002</th>\n",
" <td>97.9463</td>\n",
" <td>26.6638</td>\n",
" <td>71.2825</td>\n",
" <td>72.7771</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>AVOCAs0003</th>\n",
" <td>70.7306</td>\n",
" <td>40.2020</td>\n",
" <td>30.7232</td>\n",
" <td>43.4369</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>AVOCAs0004</th>\n",
" <td>98.2859</td>\n",
" <td>45.4986</td>\n",
" <td>52.6330</td>\n",
" <td>53.5509</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>AVOCAs0005</th>\n",
" <td>95.5841</td>\n",
" <td>54.9753</td>\n",
" <td>40.5733</td>\n",
" <td>42.4478</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>AVOCAs0006</th>\n",
" <td>113.0441</td>\n",
" <td>67.8912</td>\n",
" <td>45.2582</td>\n",
" <td>40.0359</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>AVOCAs0007</th>\n",
" <td>65.3283</td>\n",
" <td>44.2821</td>\n",
" <td>21.4544</td>\n",
" <td>32.8409</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>AVOCAs0008</th>\n",
" <td>52.3933</td>\n",
" <td>45.2243</td>\n",
" <td>7.1728</td>\n",
" <td>13.6904</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>BILG0001</th>\n",
" <td>20.3405</td>\n",
" <td>7.6207</td>\n",
" <td>12.7198</td>\n",
" <td>62.5344</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>BILG0002</th>\n",
" <td>156.4205</td>\n",
" <td>98.1716</td>\n",
" <td>58.1659</td>\n",
" <td>37.1856</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>BOAT0001</th>\n",
" <td>23.8361</td>\n",
" <td>23.6865</td>\n",
" <td>-0.0926</td>\n",
" <td>-0.3885</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>BOAT0002</th>\n",
" <td>38.8398</td>\n",
" <td>14.0819</td>\n",
" <td>24.7579</td>\n",
" <td>63.7436</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>BOAT0003</th>\n",
" <td>73.6809</td>\n",
" <td>17.8545</td>\n",
" <td>55.8264</td>\n",
" <td>75.7678</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>BOAT0004</th>\n",
" <td>73.1954</td>\n",
" <td>23.1583</td>\n",
" <td>50.0372</td>\n",
" <td>68.3610</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>BOAT0005</th>\n",
" <td>53.5122</td>\n",
" <td>22.4537</td>\n",
" <td>31.0585</td>\n",
" <td>58.0400</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>BOOM0001</th>\n",
" <td>236.4540</td>\n",
" <td>218.4918</td>\n",
" <td>20.8725</td>\n",
" <td>8.8273</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>CATHIE0024</th>\n",
" <td>63.6452</td>\n",
" <td>38.4261</td>\n",
" <td>25.2191</td>\n",
" <td>39.6245</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>CATHIE0026</th>\n",
" <td>75.1334</td>\n",
" <td>43.7179</td>\n",
" <td>31.0940</td>\n",
" <td>41.3851</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>CRESn0069</th>\n",
" <td>37.5896</td>\n",
" <td>8.3495</td>\n",
" <td>29.2401</td>\n",
" <td>77.7877</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>DEEWHYn0008</th>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>DEEWHYn0009</th>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>DEEWHYs0005</th>\n",
" <td>62.3514</td>\n",
" <td>24.9797</td>\n",
" <td>37.3716</td>\n",
" <td>59.9372</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>DEEWHYs0008</th>\n",
" <td>1.0688</td>\n",
" <td>1.3640</td>\n",
" <td>0.0000</td>\n",
" <td>0.0000</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>DIAMONDn0023</th>\n",
" <td>67.9416</td>\n",
" <td>21.1812</td>\n",
" <td>46.7603</td>\n",
" <td>68.8244</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>DIAMONDs0006</th>\n",
" <td>74.9357</td>\n",
" <td>42.4382</td>\n",
" <td>32.2536</td>\n",
" <td>43.0416</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>DIAMONDs0007</th>\n",
" <td>153.7639</td>\n",
" <td>127.9469</td>\n",
" <td>26.1595</td>\n",
" <td>17.0128</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>DUNBn0031</th>\n",
" <td>36.5301</td>\n",
" <td>6.3289</td>\n",
" <td>30.2012</td>\n",
" <td>82.6748</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>DUNBn0055</th>\n",
" <td>189.5283</td>\n",
" <td>134.2760</td>\n",
" <td>56.7139</td>\n",
" <td>29.9237</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>ELIZA0002</th>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0170</th>\n",
" <td>198.2785</td>\n",
" <td>216.6368</td>\n",
" <td>-18.6067</td>\n",
" <td>-9.3841</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0175</th>\n",
" <td>136.2126</td>\n",
" <td>114.1715</td>\n",
" <td>22.4984</td>\n",
" <td>16.5171</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0179</th>\n",
" <td>67.7795</td>\n",
" <td>45.3981</td>\n",
" <td>22.3815</td>\n",
" <td>33.0210</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0180</th>\n",
" <td>166.0813</td>\n",
" <td>149.5195</td>\n",
" <td>15.3441</td>\n",
" <td>9.2389</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0181</th>\n",
" <td>90.1147</td>\n",
" <td>98.9808</td>\n",
" <td>-9.1107</td>\n",
" <td>-10.1102</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0182</th>\n",
" <td>67.8622</td>\n",
" <td>86.0118</td>\n",
" <td>-18.1671</td>\n",
" <td>-26.7705</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0183</th>\n",
" <td>125.9085</td>\n",
" <td>137.7342</td>\n",
" <td>-12.6233</td>\n",
" <td>-10.0257</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0184</th>\n",
" <td>146.6586</td>\n",
" <td>123.5371</td>\n",
" <td>23.2603</td>\n",
" <td>15.8602</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0185</th>\n",
" <td>141.5421</td>\n",
" <td>142.7279</td>\n",
" <td>-1.2619</td>\n",
" <td>-0.8915</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0186</th>\n",
" <td>115.9148</td>\n",
" <td>123.1507</td>\n",
" <td>-7.8392</td>\n",
" <td>-6.7629</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0187</th>\n",
" <td>126.5519</td>\n",
" <td>147.4371</td>\n",
" <td>-22.4452</td>\n",
" <td>-17.7359</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0188</th>\n",
" <td>345.5234</td>\n",
" <td>353.8766</td>\n",
" <td>-11.3322</td>\n",
" <td>-3.2797</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0189</th>\n",
" <td>171.6354</td>\n",
" <td>134.9192</td>\n",
" <td>35.8697</td>\n",
" <td>20.8988</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCNs0190</th>\n",
" <td>151.4113</td>\n",
" <td>116.6381</td>\n",
" <td>35.0161</td>\n",
" <td>23.1264</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCS0014</th>\n",
" <td>98.4991</td>\n",
" <td>57.3495</td>\n",
" <td>40.9006</td>\n",
" <td>41.5238</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>STOCS0043</th>\n",
" <td>36.4256</td>\n",
" <td>11.7208</td>\n",
" <td>24.7048</td>\n",
" <td>67.8225</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0005</th>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0015</th>\n",
" <td>56.2724</td>\n",
" <td>16.0428</td>\n",
" <td>40.2296</td>\n",
" <td>71.4908</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0016</th>\n",
" <td>97.8849</td>\n",
" <td>39.8432</td>\n",
" <td>58.0417</td>\n",
" <td>59.2958</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0017</th>\n",
" <td>36.5683</td>\n",
" <td>8.5380</td>\n",
" <td>28.0303</td>\n",
" <td>76.6520</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0018</th>\n",
" <td>42.1423</td>\n",
" <td>10.5498</td>\n",
" <td>31.5925</td>\n",
" <td>74.9662</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0019</th>\n",
" <td>39.6097</td>\n",
" <td>9.2404</td>\n",
" <td>30.3693</td>\n",
" <td>76.6714</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0020</th>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0021</th>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0022</th>\n",
" <td>1.1034</td>\n",
" <td>0.4478</td>\n",
" <td>0.6556</td>\n",
" <td>59.4166</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0023</th>\n",
" <td>4.4796</td>\n",
" <td>0.3356</td>\n",
" <td>4.1440</td>\n",
" <td>92.5081</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0024</th>\n",
" <td>61.8478</td>\n",
" <td>31.3007</td>\n",
" <td>30.5470</td>\n",
" <td>49.3907</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0025</th>\n",
" <td>45.9707</td>\n",
" <td>14.6125</td>\n",
" <td>31.3582</td>\n",
" <td>68.2134</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0026</th>\n",
" <td>32.8591</td>\n",
" <td>12.9479</td>\n",
" <td>19.9112</td>\n",
" <td>60.5957</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" <tr>\n",
" <th>WAMBE0027</th>\n",
" <td>26.4132</td>\n",
" <td>18.7142</td>\n",
" <td>7.6990</td>\n",
" <td>29.1484</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" <td>NaN</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>242 rows × 9 columns</p>\n",
"</div>"
],
"text/plain": [
" prestorm_swash_vol poststorm_swash_vol swash_vol_change \\\n",
"site_id \n",
"AVOCAn0009 4.5783 0.1110 4.4673 \n",
"AVOCAs0001 NaN NaN NaN \n",
"AVOCAs0002 97.9463 26.6638 71.2825 \n",
"AVOCAs0003 70.7306 40.2020 30.7232 \n",
"AVOCAs0004 98.2859 45.4986 52.6330 \n",
"AVOCAs0005 95.5841 54.9753 40.5733 \n",
"AVOCAs0006 113.0441 67.8912 45.2582 \n",
"AVOCAs0007 65.3283 44.2821 21.4544 \n",
"AVOCAs0008 52.3933 45.2243 7.1728 \n",
"BILG0001 20.3405 7.6207 12.7198 \n",
"BILG0002 156.4205 98.1716 58.1659 \n",
"BOAT0001 23.8361 23.6865 -0.0926 \n",
"BOAT0002 38.8398 14.0819 24.7579 \n",
"BOAT0003 73.6809 17.8545 55.8264 \n",
"BOAT0004 73.1954 23.1583 50.0372 \n",
"BOAT0005 53.5122 22.4537 31.0585 \n",
"BOOM0001 236.4540 218.4918 20.8725 \n",
"CATHIE0024 63.6452 38.4261 25.2191 \n",
"CATHIE0026 75.1334 43.7179 31.0940 \n",
"CRESn0069 37.5896 8.3495 29.2401 \n",
"DEEWHYn0008 NaN NaN NaN \n",
"DEEWHYn0009 NaN NaN NaN \n",
"DEEWHYs0005 62.3514 24.9797 37.3716 \n",
"DEEWHYs0008 1.0688 1.3640 0.0000 \n",
"DIAMONDn0023 67.9416 21.1812 46.7603 \n",
"DIAMONDs0006 74.9357 42.4382 32.2536 \n",
"DIAMONDs0007 153.7639 127.9469 26.1595 \n",
"DUNBn0031 36.5301 6.3289 30.2012 \n",
"DUNBn0055 189.5283 134.2760 56.7139 \n",
"ELIZA0002 NaN NaN NaN \n",
"... ... ... ... \n",
"STOCNs0170 198.2785 216.6368 -18.6067 \n",
"STOCNs0175 136.2126 114.1715 22.4984 \n",
"STOCNs0179 67.7795 45.3981 22.3815 \n",
"STOCNs0180 166.0813 149.5195 15.3441 \n",
"STOCNs0181 90.1147 98.9808 -9.1107 \n",
"STOCNs0182 67.8622 86.0118 -18.1671 \n",
"STOCNs0183 125.9085 137.7342 -12.6233 \n",
"STOCNs0184 146.6586 123.5371 23.2603 \n",
"STOCNs0185 141.5421 142.7279 -1.2619 \n",
"STOCNs0186 115.9148 123.1507 -7.8392 \n",
"STOCNs0187 126.5519 147.4371 -22.4452 \n",
"STOCNs0188 345.5234 353.8766 -11.3322 \n",
"STOCNs0189 171.6354 134.9192 35.8697 \n",
"STOCNs0190 151.4113 116.6381 35.0161 \n",
"STOCS0014 98.4991 57.3495 40.9006 \n",
"STOCS0043 36.4256 11.7208 24.7048 \n",
"WAMBE0005 NaN NaN NaN \n",
"WAMBE0015 56.2724 16.0428 40.2296 \n",
"WAMBE0016 97.8849 39.8432 58.0417 \n",
"WAMBE0017 36.5683 8.5380 28.0303 \n",
"WAMBE0018 42.1423 10.5498 31.5925 \n",
"WAMBE0019 39.6097 9.2404 30.3693 \n",
"WAMBE0020 NaN NaN NaN \n",
"WAMBE0021 NaN NaN NaN \n",
"WAMBE0022 1.1034 0.4478 0.6556 \n",
"WAMBE0023 4.4796 0.3356 4.1440 \n",
"WAMBE0024 61.8478 31.3007 30.5470 \n",
"WAMBE0025 45.9707 14.6125 31.3582 \n",
"WAMBE0026 32.8591 12.9479 19.9112 \n",
"WAMBE0027 26.4132 18.7142 7.6990 \n",
"\n",
" swash_pct_change prestorm_dune_face_vol \\\n",
"site_id \n",
"AVOCAn0009 97.5750 NaN \n",
"AVOCAs0001 NaN NaN \n",
"AVOCAs0002 72.7771 NaN \n",
"AVOCAs0003 43.4369 NaN \n",
"AVOCAs0004 53.5509 NaN \n",
"AVOCAs0005 42.4478 NaN \n",
"AVOCAs0006 40.0359 NaN \n",
"AVOCAs0007 32.8409 NaN \n",
"AVOCAs0008 13.6904 NaN \n",
"BILG0001 62.5344 NaN \n",
"BILG0002 37.1856 NaN \n",
"BOAT0001 -0.3885 NaN \n",
"BOAT0002 63.7436 NaN \n",
"BOAT0003 75.7678 NaN \n",
"BOAT0004 68.3610 NaN \n",
"BOAT0005 58.0400 NaN \n",
"BOOM0001 8.8273 NaN \n",
"CATHIE0024 39.6245 0.0 \n",
"CATHIE0026 41.3851 NaN \n",
"CRESn0069 77.7877 NaN \n",
"DEEWHYn0008 NaN NaN \n",
"DEEWHYn0009 NaN NaN \n",
"DEEWHYs0005 59.9372 NaN \n",
"DEEWHYs0008 0.0000 NaN \n",
"DIAMONDn0023 68.8244 NaN \n",
"DIAMONDs0006 43.0416 NaN \n",
"DIAMONDs0007 17.0128 NaN \n",
"DUNBn0031 82.6748 NaN \n",
"DUNBn0055 29.9237 NaN \n",
"ELIZA0002 NaN NaN \n",
"... ... ... \n",
"STOCNs0170 -9.3841 NaN \n",
"STOCNs0175 16.5171 NaN \n",
"STOCNs0179 33.0210 NaN \n",
"STOCNs0180 9.2389 NaN \n",
"STOCNs0181 -10.1102 NaN \n",
"STOCNs0182 -26.7705 NaN \n",
"STOCNs0183 -10.0257 NaN \n",
"STOCNs0184 15.8602 NaN \n",
"STOCNs0185 -0.8915 NaN \n",
"STOCNs0186 -6.7629 NaN \n",
"STOCNs0187 -17.7359 NaN \n",
"STOCNs0188 -3.2797 NaN \n",
"STOCNs0189 20.8988 NaN \n",
"STOCNs0190 23.1264 NaN \n",
"STOCS0014 41.5238 NaN \n",
"STOCS0043 67.8225 NaN \n",
"WAMBE0005 NaN NaN \n",
"WAMBE0015 71.4908 NaN \n",
"WAMBE0016 59.2958 NaN \n",
"WAMBE0017 76.6520 NaN \n",
"WAMBE0018 74.9662 NaN \n",
"WAMBE0019 76.6714 NaN \n",
"WAMBE0020 NaN NaN \n",
"WAMBE0021 NaN NaN \n",
"WAMBE0022 59.4166 NaN \n",
"WAMBE0023 92.5081 NaN \n",
"WAMBE0024 49.3907 NaN \n",
"WAMBE0025 68.2134 NaN \n",
"WAMBE0026 60.5957 NaN \n",
"WAMBE0027 29.1484 NaN \n",
"\n",
" poststorm_dune_face_vol dune_face_vol_change \\\n",
"site_id \n",
"AVOCAn0009 NaN NaN \n",
"AVOCAs0001 NaN NaN \n",
"AVOCAs0002 NaN NaN \n",
"AVOCAs0003 NaN NaN \n",
"AVOCAs0004 NaN NaN \n",
"AVOCAs0005 NaN NaN \n",
"AVOCAs0006 NaN NaN \n",
"AVOCAs0007 NaN NaN \n",
"AVOCAs0008 NaN NaN \n",
"BILG0001 NaN NaN \n",
"BILG0002 NaN NaN \n",
"BOAT0001 NaN NaN \n",
"BOAT0002 NaN NaN \n",
"BOAT0003 NaN NaN \n",
"BOAT0004 NaN NaN \n",
"BOAT0005 NaN NaN \n",
"BOOM0001 NaN NaN \n",
"CATHIE0024 0.0 0.0 \n",
"CATHIE0026 NaN NaN \n",
"CRESn0069 NaN NaN \n",
"DEEWHYn0008 NaN NaN \n",
"DEEWHYn0009 NaN NaN \n",
"DEEWHYs0005 NaN NaN \n",
"DEEWHYs0008 NaN NaN \n",
"DIAMONDn0023 NaN NaN \n",
"DIAMONDs0006 NaN NaN \n",
"DIAMONDs0007 NaN NaN \n",
"DUNBn0031 NaN NaN \n",
"DUNBn0055 NaN NaN \n",
"ELIZA0002 NaN NaN \n",
"... ... ... \n",
"STOCNs0170 NaN NaN \n",
"STOCNs0175 NaN NaN \n",
"STOCNs0179 NaN NaN \n",
"STOCNs0180 NaN NaN \n",
"STOCNs0181 NaN NaN \n",
"STOCNs0182 NaN NaN \n",
"STOCNs0183 NaN NaN \n",
"STOCNs0184 NaN NaN \n",
"STOCNs0185 NaN NaN \n",
"STOCNs0186 NaN NaN \n",
"STOCNs0187 NaN NaN \n",
"STOCNs0188 NaN NaN \n",
"STOCNs0189 NaN NaN \n",
"STOCNs0190 NaN NaN \n",
"STOCS0014 NaN NaN \n",
"STOCS0043 NaN NaN \n",
"WAMBE0005 NaN NaN \n",
"WAMBE0015 NaN NaN \n",
"WAMBE0016 NaN NaN \n",
"WAMBE0017 NaN NaN \n",
"WAMBE0018 NaN NaN \n",
"WAMBE0019 NaN NaN \n",
"WAMBE0020 NaN NaN \n",
"WAMBE0021 NaN NaN \n",
"WAMBE0022 NaN NaN \n",
"WAMBE0023 NaN NaN \n",
"WAMBE0024 NaN NaN \n",
"WAMBE0025 NaN NaN \n",
"WAMBE0026 NaN NaN \n",
"WAMBE0027 NaN NaN \n",
"\n",
" dune_face_pct_change storm_regime \n",
"site_id \n",
"AVOCAn0009 NaN NaN \n",
"AVOCAs0001 NaN NaN \n",
"AVOCAs0002 NaN NaN \n",
"AVOCAs0003 NaN NaN \n",
"AVOCAs0004 NaN NaN \n",
"AVOCAs0005 NaN NaN \n",
"AVOCAs0006 NaN NaN \n",
"AVOCAs0007 NaN NaN \n",
"AVOCAs0008 NaN NaN \n",
"BILG0001 NaN NaN \n",
"BILG0002 NaN NaN \n",
"BOAT0001 NaN NaN \n",
"BOAT0002 NaN NaN \n",
"BOAT0003 NaN NaN \n",
"BOAT0004 NaN NaN \n",
"BOAT0005 NaN NaN \n",
"BOOM0001 NaN NaN \n",
"CATHIE0024 NaN NaN \n",
"CATHIE0026 NaN NaN \n",
"CRESn0069 NaN NaN \n",
"DEEWHYn0008 NaN NaN \n",
"DEEWHYn0009 NaN NaN \n",
"DEEWHYs0005 NaN NaN \n",
"DEEWHYs0008 NaN NaN \n",
"DIAMONDn0023 NaN NaN \n",
"DIAMONDs0006 NaN NaN \n",
"DIAMONDs0007 NaN NaN \n",
"DUNBn0031 NaN NaN \n",
"DUNBn0055 NaN NaN \n",
"ELIZA0002 NaN NaN \n",
"... ... ... \n",
"STOCNs0170 NaN NaN \n",
"STOCNs0175 NaN NaN \n",
"STOCNs0179 NaN NaN \n",
"STOCNs0180 NaN NaN \n",
"STOCNs0181 NaN NaN \n",
"STOCNs0182 NaN NaN \n",
"STOCNs0183 NaN NaN \n",
"STOCNs0184 NaN NaN \n",
"STOCNs0185 NaN NaN \n",
"STOCNs0186 NaN NaN \n",
"STOCNs0187 NaN NaN \n",
"STOCNs0188 NaN NaN \n",
"STOCNs0189 NaN NaN \n",
"STOCNs0190 NaN NaN \n",
"STOCS0014 NaN NaN \n",
"STOCS0043 NaN NaN \n",
"WAMBE0005 NaN NaN \n",
"WAMBE0015 NaN NaN \n",
"WAMBE0016 NaN NaN \n",
"WAMBE0017 NaN NaN \n",
"WAMBE0018 NaN NaN \n",
"WAMBE0019 NaN NaN \n",
"WAMBE0020 NaN NaN \n",
"WAMBE0021 NaN NaN \n",
"WAMBE0022 NaN NaN \n",
"WAMBE0023 NaN NaN \n",
"WAMBE0024 NaN NaN \n",
"WAMBE0025 NaN NaN \n",
"WAMBE0026 NaN NaN \n",
"WAMBE0027 NaN NaN \n",
"\n",
"[242 rows x 9 columns]"
]
},
"execution_count": 58,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df_impacts = impacts['observed']\n",
"df_no_obs_impacts = df_impacts[df_impacts.storm_regime.isnull()]\n",
"no_obs_impacts_sites = df_no_obs_impacts.index\n",
"\n",
"df_no_obs_impacts\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "It looks like the problem arises when we cannot identify the prestorm and post storm swash and berm volume changes."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Check pre and post storm profiles\n",
    "It looks like, for some reason, there are no post storm profiles for some sites (e.g. `STUART0011`). Not sure if this is a processing error, or whether they simply haven't been included in the dataset."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Check prestorm dune crests\n",
    "If there are no prestorm dune crests defined, we cannot calculate a mean slope and hence we won't be able to determine the observed impacts."
]
},
{
"cell_type": "code",
"execution_count": 74,
"metadata": {
"ExecuteTime": {
"end_time": "2018-12-13T02:37:07.225965Z",
"start_time": "2018-12-13T02:37:07.213921Z"
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"231 sites have no dune crests:\n",
"AVOCAn0009\n",
"AVOCAs0001\n",
"AVOCAs0002\n",
"AVOCAs0003\n",
"AVOCAs0004\n",
"AVOCAs0005\n",
"AVOCAs0006\n",
"AVOCAs0007\n",
"AVOCAs0008\n",
"BILG0001\n",
"BILG0002\n",
"BOAT0001\n",
"BOAT0002\n",
"BOAT0003\n",
"BOAT0004\n",
"BOAT0005\n",
"BOOM0001\n",
"CATHIE0026\n",
"CRESn0069\n",
"DEEWHYn0008\n",
"DEEWHYn0009\n",
"DEEWHYs0005\n",
"DEEWHYs0008\n",
"DIAMONDs0006\n",
"DIAMONDs0007\n",
"ENTRA0005\n",
"ENTRA0006\n",
"ENTRA0077\n",
"ENTRA0078\n",
"ENTRA0079\n",
"FOST0003\n",
"GRANTSn0004\n",
"GRANTSn0005\n",
"GRANTSn0006\n",
"GRANTSn0007\n",
"GRANTSn0008\n",
"GRANTSn0009\n",
"GRANTSn0021\n",
"GRANTSs0014\n",
"HARGs0003\n",
"HARGs0004\n",
"HARGs0005\n",
"HARR0056\n",
"LHOUSE0001\n",
"LHOUSE0002\n",
"LHOUSE0003\n",
"LHOUSE0004\n",
"LHOUSE0012\n",
"LHOUSE0013\n",
"LHOUSEs0015\n",
"MACM0008\n",
"MACM0012\n",
"MACM0013\n",
"MACM0014\n",
"MACM0015\n",
"MACM0016\n",
"MANNING0001\n",
"MANNING0002\n",
"MANNING0003\n",
"MANNING0004\n",
"MANNING0005\n",
"MANNING0101\n",
"MANNING0102\n",
"MANNING0103\n",
"MANNING0104\n",
"MANNING0105\n",
"MANNING0106\n",
"MANNING0107\n",
"MANNING0108\n",
"MANNING0109\n",
"MONA0001\n",
"MONA0002\n",
"MONA0003\n",
"MONA0014\n",
"MONA0015\n",
"MONA0016\n",
"MONA0017\n",
"MONA0018\n",
"MONA0019\n",
"MONA0020\n",
"MONA0021\n",
"NAMB0027\n",
"NAMB0041\n",
"NARRA0001\n",
"NARRA0028\n",
"NARRA0035\n",
"NINEMn0050\n",
"OLDBAR0035\n",
"PEARLn0001\n",
"PEARLn0002\n",
"PEARLn0003\n",
"PEARLn0004\n",
"PEARLs0003\n",
"PEARLs0004\n",
"PEARLs0005\n",
"STOCNn0012\n",
"STOCNn0013\n",
"STOCNn0014\n",
"STOCNn0015\n",
"STOCNn0016\n",
"STOCNn0017\n",
"STOCNn0018\n",
"STOCNn0019\n",
"STOCNn0020\n",
"STOCNn0021\n",
"STOCNn0022\n",
"STOCNn0023\n",
"STOCNn0024\n",
"STOCNn0025\n",
"STOCNn0026\n",
"STOCNn0027\n",
"STOCNn0028\n",
"STOCNn0029\n",
"STOCNn0030\n",
"STOCNn0031\n",
"STOCNn0032\n",
"STOCNn0033\n",
"STOCNn0034\n",
"STOCNn0035\n",
"STOCNn0036\n",
"STOCNn0037\n",
"STOCNn0038\n",
"STOCNn0039\n",
"STOCNn0044\n",
"STOCNn0059\n",
"STOCNn0062\n",
"STOCNn0063\n",
"STOCNn0064\n",
"STOCNn0065\n",
"STOCNs0022\n",
"STOCNs0025\n",
"STOCNs0026\n",
"STOCNs0031\n",
"STOCNs0045\n",
"STOCNs0048\n",
"STOCNs0049\n",
"STOCNs0053\n",
"STOCNs0055\n",
"STOCNs0056\n",
"STOCNs0057\n",
"STOCNs0058\n",
"STOCNs0059\n",
"STOCNs0060\n",
"STOCNs0061\n",
"STOCNs0062\n",
"STOCNs0073\n",
"STOCNs0079\n",
"STOCNs0088\n",
"STOCNs0089\n",
"STOCNs0090\n",
"STOCNs0091\n",
"STOCNs0092\n",
"STOCNs0093\n",
"STOCNs0094\n",
"STOCNs0095\n",
"STOCNs0096\n",
"STOCNs0097\n",
"STOCNs0098\n",
"STOCNs0099\n",
"STOCNs0100\n",
"STOCNs0101\n",
"STOCNs0102\n",
"STOCNs0103\n",
"STOCNs0104\n",
"STOCNs0105\n",
"STOCNs0106\n",
"STOCNs0107\n",
"STOCNs0108\n",
"STOCNs0109\n",
"STOCNs0110\n",
"STOCNs0111\n",
"STOCNs0112\n",
"STOCNs0113\n",
"STOCNs0114\n",
"STOCNs0115\n",
"STOCNs0116\n",
"STOCNs0117\n",
"STOCNs0118\n",
"STOCNs0119\n",
"STOCNs0120\n",
"STOCNs0121\n",
"STOCNs0122\n",
"STOCNs0123\n",
"STOCNs0124\n",
"STOCNs0125\n",
"STOCNs0126\n",
"STOCNs0127\n",
"STOCNs0128\n",
"STOCNs0137\n",
"STOCNs0141\n",
"STOCNs0144\n",
"STOCNs0150\n",
"STOCNs0155\n",
"STOCNs0156\n",
"STOCNs0157\n",
"STOCNs0158\n",
"STOCNs0159\n",
"STOCNs0160\n",
"STOCNs0167\n",
"STOCNs0168\n",
"STOCNs0169\n",
"STOCNs0170\n",
"STOCNs0175\n",
"STOCNs0179\n",
"STOCNs0180\n",
"STOCNs0181\n",
"STOCNs0182\n",
"STOCNs0183\n",
"STOCNs0184\n",
"STOCNs0185\n",
"STOCNs0186\n",
"STOCNs0187\n",
"STOCNs0188\n",
"STOCNs0189\n",
"STOCNs0190\n",
"STOCS0014\n",
"STOCS0043\n",
"WAMBE0005\n",
"WAMBE0015\n",
"WAMBE0016\n",
"WAMBE0017\n",
"WAMBE0018\n",
"WAMBE0019\n",
"WAMBE0020\n",
"WAMBE0021\n",
"WAMBE0022\n",
"WAMBE0023\n",
"WAMBE0024\n",
"WAMBE0025\n",
"WAMBE0026\n",
"WAMBE0027\n"
]
}
],
"source": [
"df_no_crests = df_profile_features.query('profile_type==\"prestorm\" & (dune_crest_x != dune_crest_x)')\n",
"print('{} sites have no dune crests:'.format(len(df_no_crests)))\n",
"\n",
"for site_id in df_no_crests.index.tolist():\n",
" print(site_id)"
]
}
],
"metadata": {
"hide_input": false,
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
},
"toc": {
"base_numbering": 1,
"nav_menu": {
"height": "47px",
"width": "262px"
},
"number_sections": true,
"sideBar": true,
"skip_h1_title": false,
"title_cell": "Table of Contents",
"title_sidebar": "Contents",
"toc_cell": false,
"toc_position": {
"height": "656px",
"left": "508px",
"top": "90px",
"width": "218.797px"
},
"toc_section_display": true,
"toc_window_display": true
},
"varInspector": {
"cols": {
"lenName": 16,
"lenType": 16,
"lenVar": 40
},
"kernels_config": {
"python": {
"delete_cmd_postfix": "",
"delete_cmd_prefix": "del ",
"library": "var_list.py",
"varRefreshCmd": "print(var_dic_list())"
},
"r": {
"delete_cmd_postfix": ") ",
"delete_cmd_prefix": "rm(",
"library": "var_list.r",
"varRefreshCmd": "cat(var_dic_list()) "
}
},
"types_to_exclude": [
"module",
"function",
"builtin_function_or_method",
"instance",
"_Feature"
],
"window_display": false
}
},
"nbformat": 4,
"nbformat_minor": 2
}