Update notebooks

develop
Chris Leaman 6 years ago
parent 6ffdd2611d
commit 66c7b25cc4

@@ -90,8 +90,7 @@
"# Note that the forecasted data sets should be in the same order for impacts and twls\n",
"impacts = {\n",
" 'forecasted': {\n",
" 'foreshore_slope_sto06': df_from_csv('impacts_forecasted_foreshore_slope_sto06.csv', index_col=[0]),\n",
" 'mean_slope_sto06': df_from_csv('impacts_forecasted_mean_slope_sto06.csv', index_col=[0]),\n",
" 'postintertidal_slope_sto06': df_from_csv('impacts_forecasted_postintertidal_slope_sto06.csv', index_col=[0]),\n",
" },\n",
" 'observed': df_from_csv('impacts_observed.csv', index_col=[0])\n",
" }\n",
@@ -99,8 +98,7 @@
"\n",
"twls = {\n",
" 'forecasted': {\n",
" 'foreshore_slope_sto06': df_from_csv('twl_foreshore_slope_sto06.csv', index_col=[0, 1]),\n",
" 'mean_slope_sto06':df_from_csv('twl_mean_slope_sto06.csv', index_col=[0, 1]),\n",
" 'postintertidal_slope_sto06':df_from_csv('twl_postintertidal_slope_sto06.csv', index_col=[0, 1]),\n",
" }\n",
"}\n",
"print('Done!')"
@@ -260,6 +258,7 @@
" title='Bed Profiles',\n",
" height=300,\n",
" legend=dict(font={'size': 10}),\n",
" showlegend=False,\n",
" margin=dict(t=50, b=50, l=50, r=20),\n",
" xaxis=dict(\n",
" title='x (m)',\n",

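The df_from_csv helper used in these cells is not part of this diff. A minimal sketch of what such a helper might look like, assuming it is just a thin wrapper around pandas.read_csv pointed at the notebook's data folder (the folder path and signature are assumptions, not taken from this repository):

import os
import pandas as pd

def df_from_csv(csv, index_col, data_folder='./data'):
    # Hypothetical helper: load one impacts/TWL csv into a DataFrame,
    # using the given column(s) as the index (or MultiIndex).
    return pd.read_csv(os.path.join(data_folder, csv), index_col=index_col)
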
@@ -10,9 +10,7 @@
},
{
"cell_type": "markdown",
"metadata": {
"heading_collapsed": true
},
"metadata": {},
"source": [
"## Setup notebook\n",
"Import our required packages and set default plotting options."
@@ -21,9 +19,7 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hidden": true
},
"metadata": {},
"outputs": [],
"source": [
"# Enable autoreloading of our modules. \n",
@@ -37,9 +33,7 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hidden": true
},
"metadata": {},
"outputs": [],
"source": [
"from IPython.core.debugger import set_trace\n",
@@ -66,6 +60,7 @@
"from scipy import stats\n",
"from sklearn.metrics import confusion_matrix\n",
"import matplotlib.pyplot as plt\n",
"from matplotlib.ticker import MultipleLocator\n",
"from matplotlib.lines import Line2D\n",
"from cycler import cycler\n",
"from scipy.interpolate import interp1d\n",
@@ -75,9 +70,7 @@
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"hidden": true
},
"metadata": {},
"outputs": [],
"source": [
"# Matplot lib default settings\n",
@@ -128,11 +121,22 @@
"# Note that the forecasted data sets should be in the same order for impacts and twls\n",
"impacts = {\n",
" 'forecasted': {\n",
" 'foreshore_slope_sto06': df_from_csv('impacts_forecasted_foreshore_slope_sto06.csv',index_col=[0]),\n",
" 'mean_slope_hol86': df_from_csv('impacts_forecasted_mean_slope_hol86.csv',index_col=[0]),\n",
" 'mean_slope_nie91': df_from_csv('impacts_forecasted_mean_slope_nie91.csv',index_col=[0]),\n",
" 'mean_slope_pow18': df_from_csv('impacts_forecasted_mean_slope_pow18.csv',index_col=[0]),\n",
" 'mean_slope_sto06': df_from_csv('impacts_forecasted_mean_slope_sto06.csv',index_col=[0]),\n",
" 'postintertidal_slope_hol86': df_from_csv('impacts_forecasted_postintertidal_slope_hol86.csv', index_col=[0]),\n",
" 'postintertidal_slope_nie91': df_from_csv('impacts_forecasted_postintertidal_slope_nie91.csv', index_col=[0]),\n",
" 'postintertidal_slope_pow18': df_from_csv('impacts_forecasted_postintertidal_slope_pow18.csv', index_col=[0]),\n",
" 'postintertidal_slope_sto06': df_from_csv('impacts_forecasted_postintertidal_slope_sto06.csv', index_col=[0]),\n",
" 'postmean_slope_hol86': df_from_csv('impacts_forecasted_postmean_slope_hol86.csv', index_col=[0]),\n",
" 'postmean_slope_nie91': df_from_csv('impacts_forecasted_postmean_slope_nie91.csv', index_col=[0]),\n",
" 'postmean_slope_pow18': df_from_csv('impacts_forecasted_postmean_slope_pow18.csv', index_col=[0]),\n",
" 'postmean_slope_sto06': df_from_csv('impacts_forecasted_postmean_slope_sto06.csv', index_col=[0]),\n",
" 'preintertidal_slope_hol86': df_from_csv('impacts_forecasted_preintertidal_slope_hol86.csv', index_col=[0]),\n",
" 'preintertidal_slope_nie91': df_from_csv('impacts_forecasted_preintertidal_slope_nie91.csv', index_col=[0]),\n",
" 'preintertidal_slope_pow18': df_from_csv('impacts_forecasted_preintertidal_slope_pow18.csv', index_col=[0]),\n",
" 'preintertidal_slope_sto06': df_from_csv('impacts_forecasted_preintertidal_slope_sto06.csv', index_col=[0]),\n",
" 'premean_slope_hol86': df_from_csv('impacts_forecasted_premean_slope_hol86.csv', index_col=[0]),\n",
" 'premean_slope_nie91': df_from_csv('impacts_forecasted_premean_slope_nie91.csv', index_col=[0]),\n",
" 'premean_slope_pow18': df_from_csv('impacts_forecasted_premean_slope_pow18.csv', index_col=[0]),\n",
" 'premean_slope_sto06': df_from_csv('impacts_forecasted_premean_slope_sto06.csv', index_col=[0]),\n",
" },\n",
" 'observed': df_from_csv('impacts_observed.csv', index_col=[0])\n",
" }\n",
@@ -140,11 +144,22 @@
"\n",
"twls = {\n",
" 'forecasted': {\n",
" 'foreshore_slope_sto06': df_from_csv('twl_foreshore_slope_sto06.csv',index_col=[0,1]),\n",
" 'mean_slope_hol86': df_from_csv('twl_mean_slope_hol86.csv',index_col=[0,1]),\n",
" 'mean_slope_nie91': df_from_csv('twl_mean_slope_nie91.csv',index_col=[0,1]),\n",
" 'mean_slope_pow18': df_from_csv('twl_mean_slope_pow18.csv',index_col=[0,1]),\n",
" 'mean_slope_sto06': df_from_csv('twl_mean_slope_sto06.csv',index_col=[0,1]),\n",
" 'postintertidal_slope_hol86': df_from_csv('twl_postintertidal_slope_hol86.csv', index_col=[0,1]),\n",
" 'postintertidal_slope_nie91': df_from_csv('twl_postintertidal_slope_nie91.csv', index_col=[0,1]),\n",
" 'postintertidal_slope_pow18': df_from_csv('twl_postintertidal_slope_pow18.csv', index_col=[0,1]),\n",
" 'postintertidal_slope_sto06': df_from_csv('twl_postintertidal_slope_sto06.csv', index_col=[0,1]),\n",
" 'postmean_slope_hol86': df_from_csv('twl_postmean_slope_hol86.csv', index_col=[0,1]),\n",
" 'postmean_slope_nie91': df_from_csv('twl_postmean_slope_nie91.csv', index_col=[0,1]),\n",
" 'postmean_slope_pow18': df_from_csv('twl_postmean_slope_pow18.csv', index_col=[0,1]),\n",
" 'postmean_slope_sto06': df_from_csv('twl_postmean_slope_sto06.csv', index_col=[0,1]),\n",
" 'preintertidal_slope_hol86': df_from_csv('twl_preintertidal_slope_hol86.csv', index_col=[0,1]),\n",
" 'preintertidal_slope_nie91': df_from_csv('twl_preintertidal_slope_nie91.csv', index_col=[0,1]),\n",
" 'preintertidal_slope_pow18': df_from_csv('twl_preintertidal_slope_pow18.csv', index_col=[0,1]),\n",
" 'preintertidal_slope_sto06': df_from_csv('twl_preintertidal_slope_sto06.csv', index_col=[0,1]),\n",
" 'premean_slope_hol86': df_from_csv('twl_premean_slope_hol86.csv', index_col=[0,1]),\n",
" 'premean_slope_nie91': df_from_csv('twl_premean_slope_nie91.csv', index_col=[0,1]),\n",
" 'premean_slope_pow18': df_from_csv('twl_premean_slope_pow18.csv', index_col=[0,1]),\n",
" 'premean_slope_sto06': df_from_csv('twl_premean_slope_sto06.csv', index_col=[0,1]),\n",
" }\n",
"}\n",
"print('Done!')"
@@ -152,9 +167,7 @@
},
{
"cell_type": "markdown",
"metadata": {
"heading_collapsed": true
},
"metadata": {},
"source": [
"## Generate longshore plots for each beach"
]
@@ -163,128 +176,152 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"hidden": true
"code_folding": []
},
"outputs": [],
"source": [
"beach = 'NARRA'\n",
"\n",
"df_obs_impacts = impacts['observed'].loc[impacts['observed'].index.str.\n",
" contains(beach)]\n",
"\n",
"# Get index for each site on the beach\n",
"n = [x for x in range(len(df_obs_impacts))][::-1]\n",
"n_sites = [x for x in df_obs_impacts.index][::-1]\n",
"\n",
"# Convert storm regimes to categorical datatype\n",
"cat_type = CategoricalDtype(\n",
" categories=['swash', 'collision', 'overwash', 'inundation'], ordered=True)\n",
"df_obs_impacts.storm_regime = df_obs_impacts.storm_regime.astype(cat_type)\n",
"\n",
"# Create figure\n",
"f, (ax1, ax2, ax3, ax4, ax5, ax6, ax7, ax8) = plt.subplots(\n",
" 1,\n",
" 8,\n",
" sharey=True,\n",
" figsize=(18, 8),\n",
" gridspec_kw={'width_ratios': [4, 4, 2, 2, 2, 2, 2, 2]})\n",
"\n",
"# ax1: Impacts\n",
"\n",
"# Define colors for storm regime\n",
"cmap = {'swash': '#1a9850', 'collision': '#fee08b', 'overwash': '#d73027'}\n",
"\n",
"# Common marker style\n",
"marker_style = {\n",
" 's': 60,\n",
" 'linewidths': 0.7,\n",
" 'alpha': 1,\n",
" 'edgecolors': 'k',\n",
" 'marker': 'o',\n",
"}\n",
"\n",
"# Plot observed impacts\n",
"colors = [cmap.get(x) for x in df_obs_impacts.storm_regime]\n",
"colors = ['#d73027' if c is None else c for c in colors]\n",
"ax1.scatter([0 for x in n], n, color=colors, **marker_style)\n",
"\n",
"# Plot model impacts\n",
"for i, model in enumerate(impacts['forecasted']):\n",
"\n",
" # Only get model results for this beach\n",
" df_model = impacts['forecasted'][model].loc[impacts['forecasted'][model].\n",
" index.str.contains(beach)]\n",
"\n",
" # Recast storm regimes as categorical data\n",
" df_model.storm_regime = df_model.storm_regime.astype(cat_type)\n",
"beaches = list(\n",
" set([\n",
" x[:-4] for x in df_profiles.index.get_level_values('site_id').unique()\n",
" ]))\n",
"\n",
"for beach in beaches:\n",
" \n",
" df_obs_impacts = impacts['observed'].loc[impacts['observed'].index.str.\n",
" contains(beach)]\n",
"\n",
" # Get index for each site on the beach\n",
" n = [x for x in range(len(df_obs_impacts))][::-1]\n",
" n_sites = [x for x in df_obs_impacts.index][::-1]\n",
"\n",
" # Convert storm regimes to categorical datatype\n",
" cat_type = CategoricalDtype(\n",
" categories=['swash', 'collision', 'overwash', 'inundation'],\n",
" ordered=True)\n",
" df_obs_impacts.storm_regime = df_obs_impacts.storm_regime.astype(cat_type)\n",
"\n",
" # Create figure\n",
" \n",
" # Determine the height of the figure, based on the number of sites.\n",
" fig_height = max(6, 0.18 * len(n_sites))\n",
" f, (ax1, ax2, ax3, ax4, ax5, ax6, ax7, ax8) = plt.subplots(\n",
" 1,\n",
" 8,\n",
" sharey=True,\n",
" figsize=(18, fig_height),\n",
" gridspec_kw={'width_ratios': [4, 4, 2, 2, 2, 2, 2, 2]})\n",
"\n",
" # ax1: Impacts\n",
"\n",
" # Define colors for storm regime\n",
" cmap = {'swash': '#1a9850', 'collision': '#fee08b', 'overwash': '#d73027'}\n",
"\n",
" # Common marker style\n",
" marker_style = {\n",
" 's': 60,\n",
" 'linewidths': 0.7,\n",
" 'alpha': 1,\n",
" 'edgecolors': 'k',\n",
" 'marker': 'o',\n",
" }\n",
"\n",
" # Assign colors\n",
" colors = [cmap.get(x) for x in df_model.storm_regime]\n",
" # Plot observed impacts\n",
" colors = [cmap.get(x) for x in df_obs_impacts.storm_regime]\n",
" colors = ['#aaaaaa' if c is None else c for c in colors]\n",
" ax1.scatter([i + 1 for x in n], n, color=colors, **marker_style)\n",
"\n",
"# Add model names to each impact on x axis\n",
"ax1.set_xticks(range(len(impacts['forecasted']) + 1))\n",
"ax1.set_xticklabels(['observed'] +\n",
" [x.replace('_', '\\_') for x in impacts['forecasted']])\n",
"ax1.xaxis.set_tick_params(rotation=90)\n",
"\n",
"# Add title\n",
"ax1.set_title('Storm regime')\n",
"\n",
"# Create custom legend\n",
"legend_elements = [\n",
" Line2D([0], [0],\n",
" marker='o',\n",
" color='w',\n",
" label='Swash',\n",
" markerfacecolor='#1a9850',\n",
" markersize=8,\n",
" markeredgewidth=1.0,\n",
" markeredgecolor='k'),\n",
" Line2D([0], [0],\n",
" marker='o',\n",
" color='w',\n",
" label='Collision',\n",
" markerfacecolor='#fee08b',\n",
" markersize=8,\n",
" markeredgewidth=1.0,\n",
" markeredgecolor='k'),\n",
" Line2D([0], [0],\n",
" marker='o',\n",
" color='w',\n",
" label='Overwash',\n",
" markerfacecolor='#d73027',\n",
" markersize=8,\n",
" markeredgewidth=1.0,\n",
" markeredgecolor='k'),\n",
"]\n",
"ax1.legend(\n",
" handles=legend_elements, loc='lower center', bbox_to_anchor=(0.5, 1.1))\n",
"\n",
"# Replace yticks with site_ids\n",
"yticks = ax1.get_yticks().tolist()\n",
"yticks = [n_sites[int(y)] if 0 <= y <= len(n_sites) else y for y in yticks]\n",
"ax1.set_yticklabels(yticks)\n",
"\n",
"# ax2: elevations\n",
"\n",
"# Dune elevations\n",
"df_feats = df_profile_features_crest_toes.xs(['prestorm'],\n",
" level=['profile_type'])\n",
"df_feats = df_feats.loc[df_feats.index.str.contains(beach)]\n",
"\n",
"ax2.plot(df_feats.dune_crest_z, n, color='#fdae61')\n",
"ax2.plot(df_feats.dune_toe_z, n, color='#fdae61')\n",
"ax2.fill_betweenx(\n",
" n,\n",
" df_feats.dune_toe_z,\n",
" df_feats.dune_crest_z,\n",
" alpha=0.2,\n",
" color='#fdae61',\n",
" label='$D_{low}$ to $D_{high}$')\n",
"\n",
"model_colors = [\n",
" ax1.scatter([0 for x in n], n, color=colors, **marker_style)\n",
"\n",
" # Plot model impacts\n",
" for i, model in enumerate(impacts['forecasted']):\n",
"\n",
" # Only get model results for this beach\n",
" df_model = impacts['forecasted'][model].loc[\n",
" impacts['forecasted'][model].index.str.contains(beach)]\n",
"\n",
" # Recast storm regimes as categorical data\n",
" df_model.storm_regime = df_model.storm_regime.astype(cat_type)\n",
"\n",
" # Assign colors\n",
" colors = [cmap.get(x) for x in df_model.storm_regime]\n",
" colors = ['#aaaaaa' if c is None else c for c in colors]\n",
"\n",
" # Only plot markers which are different to the observed storm regime. \n",
" # This makes it easier to find where model predictions differ\n",
" y_coords = []\n",
" for obs_impact, for_impact in zip(df_model.storm_regime,\n",
" df_obs_impacts.storm_regime):\n",
" if obs_impact == for_impact:\n",
" y_coords.append(None)\n",
" else:\n",
" y_coords.append(i + 1)\n",
"\n",
" ax1.scatter(y_coords, n, color=colors, **marker_style)\n",
"\n",
" # Add model names to each impact on x axis\n",
" ax1.set_xticks(range(len(impacts['forecasted']) + 1))\n",
" ax1.set_xticklabels(['observed'] +\n",
" [x.replace('_', '\\_') for x in impacts['forecasted']])\n",
" ax1.xaxis.set_tick_params(rotation=90)\n",
"\n",
" # Add title\n",
" ax1.set_title('Storm regime')\n",
"\n",
" # Create custom legend\n",
" legend_elements = [\n",
" Line2D([0], [0],\n",
" marker='o',\n",
" color='w',\n",
" label='Swash',\n",
" markerfacecolor='#1a9850',\n",
" markersize=8,\n",
" markeredgewidth=1.0,\n",
" markeredgecolor='k'),\n",
" Line2D([0], [0],\n",
" marker='o',\n",
" color='w',\n",
" label='Collision',\n",
" markerfacecolor='#fee08b',\n",
" markersize=8,\n",
" markeredgewidth=1.0,\n",
" markeredgecolor='k'),\n",
" Line2D([0], [0],\n",
" marker='o',\n",
" color='w',\n",
" label='Overwash',\n",
" markerfacecolor='#d73027',\n",
" markersize=8,\n",
" markeredgewidth=1.0,\n",
" markeredgecolor='k'),\n",
" ]\n",
" ax1.legend(\n",
" handles=legend_elements, loc='lower center', bbox_to_anchor=(0.5, 1.1))\n",
"\n",
" # Replace axis ticks with names of site ids\n",
" ytick_labels = ax1.get_yticks().tolist()\n",
" yticks = [\n",
" n_sites[int(y)] if all([y >= 0, y < len(n_sites)]) else ''\n",
" for y in ytick_labels\n",
" ]\n",
" yticks = [x.replace('_', '\\_') for x in yticks]\n",
" ax1.set_yticklabels(yticks)\n",
"\n",
" # ax2: elevations\n",
"\n",
" # Dune elevations\n",
" df_feats = df_profile_features_crest_toes.xs(['prestorm'],\n",
" level=['profile_type'])\n",
" df_feats = df_feats.loc[df_feats.index.str.contains(beach)]\n",
"\n",
" ax2.plot(df_feats.dune_crest_z, n, color='#fdae61')\n",
" ax2.plot(df_feats.dune_toe_z, n, color='#fdae61')\n",
" ax2.fill_betweenx(\n",
" n,\n",
" df_feats.dune_toe_z,\n",
" df_feats.dune_crest_z,\n",
" alpha=0.2,\n",
" color='#fdae61',\n",
" label='$D_{low}$ to $D_{high}$')\n",
"\n",
" model_colors = [\n",
" '#1f78b4',\n",
" '#33a02c',\n",
" '#e31a1c',\n",
@@ -296,93 +333,149 @@
" '#ffff99',\n",
" ]\n",
"\n",
"# Define colors to cycle through for our R_high\n",
"ax2.set_prop_cycle(cycler('color', model_colors))\n",
"\n",
"# Plot R_high values\n",
"for model in impacts['forecasted']:\n",
"\n",
" # Only get model results for this beach\n",
" df_model = impacts['forecasted'][model].loc[impacts['forecasted'][model].\n",
" index.str.contains(beach)]\n",
"\n",
" # Recast storm regimes as categorical data\n",
" ax2.plot(df_model.R_high, n, label=model.replace('_', '\\_'))\n",
"\n",
"# Set title, legend and labels\n",
"ax2.set_title('TWL \\& dune\\nelevations')\n",
"ax2.legend(loc='lower center', bbox_to_anchor=(0.5, 1.1))\n",
"ax2.set_xlabel('Elevation (m AHD)')\n",
"\n",
"\n",
"# ax3: Plot R_high - D_low\n",
"\n",
"# Define colors to cycle through for our R_high\n",
"ax3.set_prop_cycle(cycler('color', model_colors))\n",
"\n",
"# Plot R_high values\n",
"for model in impacts['forecasted']:\n",
" \n",
" df_model = impacts['forecasted'][model].loc[impacts['forecasted'][model].\n",
" index.str.contains(beach)]\n",
" # R_high - D_low\n",
" ax3.plot(df_model.R_high - df_feats.dune_toe_z, n, label=model.replace('_', '\\_'))\n",
"\n",
"ax3.axvline(x=0,color='black',linestyle=':')\n",
"ax3.set_title('$R_{high}$ - $D_{low}$')\n",
"ax3.set_xlabel('Height (m)')\n",
"ax3.set_xlim([-2,2])\n",
"\n",
" # Define colors to cycle through for our R_high\n",
" ax2.set_prop_cycle(cycler('color', model_colors))\n",
"\n",
" # For TWL elevations, Rhigh-Dlow and R2 axis, only plot a few models\n",
" models_to_plot = [\n",
" 'premean_slope_hol86',\n",
" 'premean_slope_sto06',\n",
" 'preintertidal_slope_hol86',\n",
" 'preintertidal_slope_sto06',\n",
" ]\n",
" models_linewidth = 0.8\n",
"\n",
"# Define colors to cycle through for our R2\n",
"ax4.set_prop_cycle(cycler('color', model_colors))\n",
" # Plot R_high values\n",
" for model in models_to_plot:\n",
"\n",
"# R_high - D_low\n",
"for model in impacts['forecasted']:\n",
" df_R2 = impacts['forecasted'][model].merge(twls['forecasted'][model],on=['site_id','datetime'])\n",
" df_R2 = df_R2.loc[df_R2.index.str.contains(beach)]\n",
" ax4.plot(df_R2.R2, n, label=model.replace('_', '\\_'))\n",
" # Only get model results for this beach\n",
" df_model = impacts['forecasted'][model].loc[\n",
" impacts['forecasted'][model].index.str.contains(beach)]\n",
"\n",
"ax4.set_title(r'$R_{2\\%}$')\n",
"ax4.set_xlabel('Height (m)')\n",
" # Recast storm regimes as categorical data\n",
" ax2.plot(\n",
" df_model.R_high,\n",
" n,\n",
" label=model.replace('_', '\\_'),\n",
" linewidth=models_linewidth)\n",
"\n",
"# Need to chose a model to extract environmental parameters at maximum R_high time\n",
"model = 'mean_slope_sto06'\n",
"df_beach = impacts['forecasted'][model].merge(twls['forecasted'][model], on=['site_id','datetime'])\n",
"df_beach = df_beach.loc[df_beach.index.str.contains(beach)]\n",
" # Set title, legend and labels\n",
" ax2.set_title('TWL \\& dune\\nelevations')\n",
" ax2.legend(loc='lower center', bbox_to_anchor=(0.5, 1.1))\n",
" ax2.set_xlabel('Elevation (m AHD)')\n",
"# ax2.set_xlim([0, max(df_feats.dune_crest_z)])\n",
"\n",
"# Wave height, wave period, beach slope\n",
"ax5.plot(df_beach.beta, n,color='#4daf4a')\n",
"ax5.set_title(r'$\\beta$')\n",
"ax5.set_xlabel('Mean prestorm\\nbeach slope')\n",
"ax5.set_xlim([0,0.15])\n",
" # ax3: Plot R_high - D_low\n",
"\n",
"ax6.plot(df_beach.Hs0, n,color='#999999')\n",
"ax6.set_title('$H_{s0}$')\n",
"ax6.set_xlabel('Sig. wave height (m)')\n",
"ax6.set_xlim([3,5])\n",
" # Define colors to cycle through for our R_high\n",
" ax3.set_prop_cycle(cycler('color', model_colors))\n",
"\n",
"ax7.plot(df_beach.Tp, n,color='#999999')\n",
"ax7.set_title('$T_{p}$')\n",
"ax7.set_xlabel('Peak wave period (s)')\n",
"ax7.set_xlim([8,14])\n",
" # Plot R_high values\n",
" for model in models_to_plot:\n",
"\n",
"ax8.plot(df_beach.tide, n,color='#999999')\n",
"ax8.set_title('Tide \\& surge')\n",
"ax8.set_xlabel('Elevation (m AHD)')\n",
"ax8.set_xlim([0,2])\n",
" df_model = impacts['forecasted'][model].loc[\n",
" impacts['forecasted'][model].index.str.contains(beach)]\n",
" # R_high - D_low\n",
" ax3.plot(\n",
" df_model.R_high - df_feats.dune_toe_z,\n",
" n,\n",
" label=model.replace('_', '\\_'),\n",
" linewidth=models_linewidth)\n",
"\n",
" ax3.axvline(x=0, color='black', linestyle=':')\n",
" ax3.set_title('$R_{high}$ - $D_{low}$')\n",
" ax3.set_xlabel('Height (m)')\n",
"# ax3.set_xlim([-2, 2])\n",
"\n",
"plt.tight_layout()\n",
"f.subplots_adjust(top=0.88)\n",
"f.suptitle(beach)\n",
" # Define colors to cycle through for our R2\n",
" ax4.set_prop_cycle(cycler('color', model_colors))\n",
"\n",
"# # Print to figure\n",
"plt.savefig('07_{}.png'.format(beach), dpi=600, bbox_inches='tight')\n",
" # R_high - D_low\n",
" for model in models_to_plot:\n",
" df_R2 = impacts['forecasted'][model].merge(\n",
" twls['forecasted'][model], on=['site_id', 'datetime'], how='left')\n",
" df_R2 = df_R2.loc[df_R2.index.str.contains(beach)]\n",
" ax4.plot(\n",
" df_R2.R2,\n",
" n,\n",
" label=model.replace('_', '\\_'),\n",
" linewidth=models_linewidth)\n",
"\n",
" ax4.set_title(r'$R_{2\\%}$')\n",
" ax4.set_xlabel('Height (m)')\n",
"# ax4.set_xlim([0, 10])\n",
"\n",
" # Beach slope\n",
" slope_colors = [\n",
" '#bebada',\n",
" '#bc80bd',\n",
" '#ffed6f',\n",
" '#fdb462',\n",
" ]\n",
" ax5.set_prop_cycle(cycler('color', slope_colors))\n",
" slope_models = {\n",
" 'prestorm mean': 'premean_slope_sto06',\n",
" 'poststorm mean': 'postmean_slope_sto06',\n",
" 'prestorm intertidal': 'preintertidal_slope_sto06',\n",
" 'poststorm intertidal': 'postintertidal_slope_sto06',\n",
" }\n",
"\n",
"plt.show()\n",
"plt.close()"
" for label in slope_models:\n",
" model = slope_models[label]\n",
" df_beta = impacts['forecasted'][model].merge(\n",
" twls['forecasted'][model], on=['site_id', 'datetime'], how='left')\n",
" df_beta = df_beta.loc[df_beta.index.str.contains(beach)]\n",
" ax5.plot(df_beta.beta, n, label=label, linewidth=models_linewidth)\n",
"\n",
" ax5.set_title(r'$\\beta$')\n",
" ax5.set_xlabel('Beach slope')\n",
" ax5.legend(loc='lower center', bbox_to_anchor=(0.5, 1.1))\n",
" # ax5.set_xlim([0, 0.15])\n",
"\n",
" # Need to chose a model to extract environmental parameters at maximum R_high time\n",
" model = 'premean_slope_sto06'\n",
" df_beach = impacts['forecasted'][model].merge(\n",
" twls['forecasted'][model], on=['site_id', 'datetime'], how='left')\n",
" df_beach = df_beach.loc[df_beach.index.str.contains(beach)]\n",
"\n",
" # Wave height, wave period\n",
" ax6.plot(df_beach.Hs0, n, color='#999999')\n",
" ax6.set_title('$H_{s0}$')\n",
" ax6.set_xlabel('Sig. wave height (m)')\n",
" ax6.set_xlim([3, 5])\n",
"\n",
" ax7.plot(df_beach.Tp, n, color='#999999')\n",
" ax7.set_title('$T_{p}$')\n",
" ax7.set_xlabel('Peak wave period (s)')\n",
" ax7.set_xlim([8, 14])\n",
"\n",
" ax8.plot(df_beach.tide, n, color='#999999')\n",
" ax8.set_title('Tide \\& surge')\n",
" ax8.set_xlabel('Elevation (m AHD)')\n",
" ax8.set_xlim([0, 2])\n",
"\n",
" plt.tight_layout()\n",
" f.subplots_adjust(top=0.88)\n",
" f.suptitle(beach.replace('_', '\\_'))\n",
"\n",
" # Set minor axis ticks on each plot\n",
" ax1.yaxis.set_minor_locator(MultipleLocator(1))\n",
" ax1.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
" ax2.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
" ax3.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
" ax4.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
" ax5.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
" ax6.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
" ax7.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
" ax8.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
"\n",
" # # Print to figure\n",
" plt.savefig('07_{}.png'.format(beach), dpi=600, bbox_inches='tight')\n",
"\n",
"# plt.show()\n",
" plt.close()\n",
" print('Done: {}'.format(beach))\n",
"print('Done!')"
]
},
{
@@ -393,6 +486,13 @@
"Use sklearn metrics to generate classification reports for each forecasting model."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
@@ -407,7 +507,7 @@
"# Convert storm regimes to categorical datatype\n",
"cat_type = CategoricalDtype(\n",
" categories=['swash', 'collision', 'overwash', 'inundation'], ordered=True)\n",
"df_obs.storm_regime = df_obs_impacts.storm_regime.astype(cat_type)\n",
"df_obs.storm_regime = df_obs.storm_regime.astype(cat_type)\n",
"\n",
"for model in impacts['forecasted']:\n",
" df_for = impacts['forecasted'][model]\n",

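The last hunk above is cut off just after the start of the per-model loop in the classification-report cell. A hedged sketch of how that loop could be completed with sklearn, assuming both the observed and forecasted frames are indexed by site_id and carry a storm_regime column (the join and column suffixes are illustrative, not the notebook's actual code):

from sklearn.metrics import classification_report

df_obs = impacts['observed']

for model in impacts['forecasted']:
    df_for = impacts['forecasted'][model]

    # Align observed and forecasted regimes by site_id and drop sites missing either value
    df = df_obs[['storm_regime']].join(
        df_for[['storm_regime']], lsuffix='_obs', rsuffix='_for').dropna()

    print(model)
    print(classification_report(
        df.storm_regime_obs,
        df.storm_regime_for,
        labels=['swash', 'collision', 'overwash', 'inundation']))
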
@@ -118,10 +118,22 @@
"# Note that the forecasted data sets should be in the same order for impacts and twls\n",
"impacts = {\n",
" 'forecasted': {\n",
" 'foreshore_slope_sto06': df_from_csv('impacts_forecasted_foreshore_slope_sto06.csv', index_col=[0]),\n",
" 'mean_slope_sto06': df_from_csv('impacts_forecasted_mean_slope_sto06.csv', index_col=[0]),\n",
" 'mean_slope_nie91': df_from_csv('impacts_forecasted_mean_slope_nie91.csv', index_col=[0]),\n",
" 'mean_slope_hol86': df_from_csv('impacts_forecasted_mean_slope_hol86.csv', index_col=[0]),\n",
" 'postintertidal_slope_hol86': df_from_csv('impacts_forecasted_postintertidal_slope_hol86.csv', index_col=[0]),\n",
" 'postintertidal_slope_nie91': df_from_csv('impacts_forecasted_postintertidal_slope_nie91.csv', index_col=[0]),\n",
" 'postintertidal_slope_pow18': df_from_csv('impacts_forecasted_postintertidal_slope_pow18.csv', index_col=[0]),\n",
" 'postintertidal_slope_sto06': df_from_csv('impacts_forecasted_postintertidal_slope_sto06.csv', index_col=[0]),\n",
" 'postmean_slope_hol86': df_from_csv('impacts_forecasted_postmean_slope_hol86.csv', index_col=[0]),\n",
" 'postmean_slope_nie91': df_from_csv('impacts_forecasted_postmean_slope_nie91.csv', index_col=[0]),\n",
" 'postmean_slope_pow18': df_from_csv('impacts_forecasted_postmean_slope_pow18.csv', index_col=[0]),\n",
" 'postmean_slope_sto06': df_from_csv('impacts_forecasted_postmean_slope_sto06.csv', index_col=[0]),\n",
" 'preintertidal_slope_hol86': df_from_csv('impacts_forecasted_preintertidal_slope_hol86.csv', index_col=[0]),\n",
" 'preintertidal_slope_nie91': df_from_csv('impacts_forecasted_preintertidal_slope_nie91.csv', index_col=[0]),\n",
" 'preintertidal_slope_pow18': df_from_csv('impacts_forecasted_preintertidal_slope_pow18.csv', index_col=[0]),\n",
" 'preintertidal_slope_sto06': df_from_csv('impacts_forecasted_preintertidal_slope_sto06.csv', index_col=[0]),\n",
" 'premean_slope_hol86': df_from_csv('impacts_forecasted_premean_slope_hol86.csv', index_col=[0]),\n",
" 'premean_slope_nie91': df_from_csv('impacts_forecasted_premean_slope_nie91.csv', index_col=[0]),\n",
" 'premean_slope_pow18': df_from_csv('impacts_forecasted_premean_slope_pow18.csv', index_col=[0]),\n",
" 'premean_slope_sto06': df_from_csv('impacts_forecasted_premean_slope_sto06.csv', index_col=[0]),\n",
" },\n",
" 'observed': df_from_csv('impacts_observed.csv', index_col=[0])\n",
" }\n",
@@ -129,10 +141,22 @@
"\n",
"twls = {\n",
" 'forecasted': {\n",
" 'foreshore_slope_sto06': df_from_csv('twl_foreshore_slope_sto06.csv', index_col=[0, 1]),\n",
" 'mean_slope_sto06':df_from_csv('twl_mean_slope_sto06.csv', index_col=[0, 1]),\n",
" 'mean_slope_nie91':df_from_csv('twl_mean_slope_nie91.csv', index_col=[0, 1]),\n",
" 'mean_slope_hol86':df_from_csv('twl_mean_slope_hol86.csv', index_col=[0, 1]),\n",
" 'postintertidal_slope_hol86': df_from_csv('twl_postintertidal_slope_hol86.csv', index_col=[0,1]),\n",
" 'postintertidal_slope_nie91': df_from_csv('twl_postintertidal_slope_nie91.csv', index_col=[0,1]),\n",
" 'postintertidal_slope_pow18': df_from_csv('twl_postintertidal_slope_pow18.csv', index_col=[0,1]),\n",
" 'postintertidal_slope_sto06': df_from_csv('twl_postintertidal_slope_sto06.csv', index_col=[0,1]),\n",
" 'postmean_slope_hol86': df_from_csv('twl_postmean_slope_hol86.csv', index_col=[0,1]),\n",
" 'postmean_slope_nie91': df_from_csv('twl_postmean_slope_nie91.csv', index_col=[0,1]),\n",
" 'postmean_slope_pow18': df_from_csv('twl_postmean_slope_pow18.csv', index_col=[0,1]),\n",
" 'postmean_slope_sto06': df_from_csv('twl_postmean_slope_sto06.csv', index_col=[0,1]),\n",
" 'preintertidal_slope_hol86': df_from_csv('twl_preintertidal_slope_hol86.csv', index_col=[0,1]),\n",
" 'preintertidal_slope_nie91': df_from_csv('twl_preintertidal_slope_nie91.csv', index_col=[0,1]),\n",
" 'preintertidal_slope_pow18': df_from_csv('twl_preintertidal_slope_pow18.csv', index_col=[0,1]),\n",
" 'preintertidal_slope_sto06': df_from_csv('twl_preintertidal_slope_sto06.csv', index_col=[0,1]),\n",
" 'premean_slope_hol86': df_from_csv('twl_premean_slope_hol86.csv', index_col=[0,1]),\n",
" 'premean_slope_nie91': df_from_csv('twl_premean_slope_nie91.csv', index_col=[0,1]),\n",
" 'premean_slope_pow18': df_from_csv('twl_premean_slope_pow18.csv', index_col=[0,1]),\n",
" 'premean_slope_sto06': df_from_csv('twl_premean_slope_sto06.csv', index_col=[0,1]),\n",
" }\n",
"}\n",
"print('Done!')"

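As the comment at the top of these cells notes, the forecasted data sets must line up between impacts and twls: the plotting and merging cells index both dicts with the same model name, which is why the twl keys should not carry a '.csv' suffix. A minimal illustration of that pairing, with the merge columns mirroring the plotting cell above (otherwise illustrative only):

for model in impacts['forecasted']:
    df_impact = impacts['forecasted'][model]
    # The same key must exist in twls['forecasted']; a '.csv' suffix on the key would raise a KeyError here
    df_twl = twls['forecasted'][model]
    df_beach = df_impact.merge(df_twl, on=['site_id', 'datetime'], how='left')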