Update notebooks

develop
Chris Leaman 6 years ago
parent 6ffdd2611d
commit 66c7b25cc4

@@ -90,8 +90,7 @@
 "# Note that the forecasted data sets should be in the same order for impacts and twls\n",
 "impacts = {\n",
 " 'forecasted': {\n",
-" 'foreshore_slope_sto06': df_from_csv('impacts_forecasted_foreshore_slope_sto06.csv', index_col=[0]),\n",
-" 'mean_slope_sto06': df_from_csv('impacts_forecasted_mean_slope_sto06.csv', index_col=[0]),\n",
+" 'postintertidal_slope_sto06': df_from_csv('impacts_forecasted_postintertidal_slope_sto06.csv', index_col=[0]),\n",
 " },\n",
 " 'observed': df_from_csv('impacts_observed.csv', index_col=[0])\n",
 " }\n",
@@ -99,8 +98,7 @@
 "\n",
 "twls = {\n",
 " 'forecasted': {\n",
-" 'foreshore_slope_sto06': df_from_csv('twl_foreshore_slope_sto06.csv', index_col=[0, 1]),\n",
-" 'mean_slope_sto06':df_from_csv('twl_mean_slope_sto06.csv', index_col=[0, 1]),\n",
+" 'postintertidal_slope_sto06':df_from_csv('twl_postintertidal_slope_sto06.csv', index_col=[0, 1]),\n",
 " }\n",
 "}\n",
 "print('Done!')"
@@ -260,6 +258,7 @@
 " title='Bed Profiles',\n",
 " height=300,\n",
 " legend=dict(font={'size': 10}),\n",
+" showlegend=False,\n",
 " margin=dict(t=50, b=50, l=50, r=20),\n",
 " xaxis=dict(\n",
 " title='x (m)',\n",

@@ -10,9 +10,7 @@
 },
 {
 "cell_type": "markdown",
-"metadata": {
-"heading_collapsed": true
-},
+"metadata": {},
 "source": [
 "## Setup notebook\n",
 "Import our required packages and set default plotting options."
@ -21,9 +19,7 @@
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": { "metadata": {},
"hidden": true
},
"outputs": [], "outputs": [],
"source": [ "source": [
"# Enable autoreloading of our modules. \n", "# Enable autoreloading of our modules. \n",
@@ -37,9 +33,7 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {
-"hidden": true
-},
+"metadata": {},
 "outputs": [],
 "source": [
 "from IPython.core.debugger import set_trace\n",
@@ -66,6 +60,7 @@
 "from scipy import stats\n",
 "from sklearn.metrics import confusion_matrix\n",
 "import matplotlib.pyplot as plt\n",
+"from matplotlib.ticker import MultipleLocator\n",
 "from matplotlib.lines import Line2D\n",
 "from cycler import cycler\n",
 "from scipy.interpolate import interp1d\n",
@ -75,9 +70,7 @@
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
"metadata": { "metadata": {},
"hidden": true
},
"outputs": [], "outputs": [],
"source": [ "source": [
"# Matplot lib default settings\n", "# Matplot lib default settings\n",
@@ -128,11 +121,22 @@
 "# Note that the forecasted data sets should be in the same order for impacts and twls\n",
 "impacts = {\n",
 " 'forecasted': {\n",
-" 'foreshore_slope_sto06': df_from_csv('impacts_forecasted_foreshore_slope_sto06.csv',index_col=[0]),\n",
-" 'mean_slope_hol86': df_from_csv('impacts_forecasted_mean_slope_hol86.csv',index_col=[0]),\n",
-" 'mean_slope_nie91': df_from_csv('impacts_forecasted_mean_slope_nie91.csv',index_col=[0]),\n",
-" 'mean_slope_pow18': df_from_csv('impacts_forecasted_mean_slope_pow18.csv',index_col=[0]),\n",
-" 'mean_slope_sto06': df_from_csv('impacts_forecasted_mean_slope_sto06.csv',index_col=[0]),\n",
+" 'postintertidal_slope_hol86': df_from_csv('impacts_forecasted_postintertidal_slope_hol86.csv', index_col=[0]),\n",
+" 'postintertidal_slope_nie91': df_from_csv('impacts_forecasted_postintertidal_slope_nie91.csv', index_col=[0]),\n",
+" 'postintertidal_slope_pow18': df_from_csv('impacts_forecasted_postintertidal_slope_pow18.csv', index_col=[0]),\n",
+" 'postintertidal_slope_sto06': df_from_csv('impacts_forecasted_postintertidal_slope_sto06.csv', index_col=[0]),\n",
+" 'postmean_slope_hol86': df_from_csv('impacts_forecasted_postmean_slope_hol86.csv', index_col=[0]),\n",
+" 'postmean_slope_nie91': df_from_csv('impacts_forecasted_postmean_slope_nie91.csv', index_col=[0]),\n",
+" 'postmean_slope_pow18': df_from_csv('impacts_forecasted_postmean_slope_pow18.csv', index_col=[0]),\n",
+" 'postmean_slope_sto06': df_from_csv('impacts_forecasted_postmean_slope_sto06.csv', index_col=[0]),\n",
+" 'preintertidal_slope_hol86': df_from_csv('impacts_forecasted_preintertidal_slope_hol86.csv', index_col=[0]),\n",
+" 'preintertidal_slope_nie91': df_from_csv('impacts_forecasted_preintertidal_slope_nie91.csv', index_col=[0]),\n",
+" 'preintertidal_slope_pow18': df_from_csv('impacts_forecasted_preintertidal_slope_pow18.csv', index_col=[0]),\n",
+" 'preintertidal_slope_sto06': df_from_csv('impacts_forecasted_preintertidal_slope_sto06.csv', index_col=[0]),\n",
+" 'premean_slope_hol86': df_from_csv('impacts_forecasted_premean_slope_hol86.csv', index_col=[0]),\n",
+" 'premean_slope_nie91': df_from_csv('impacts_forecasted_premean_slope_nie91.csv', index_col=[0]),\n",
+" 'premean_slope_pow18': df_from_csv('impacts_forecasted_premean_slope_pow18.csv', index_col=[0]),\n",
+" 'premean_slope_sto06': df_from_csv('impacts_forecasted_premean_slope_sto06.csv', index_col=[0]),\n",
 " },\n",
 " 'observed': df_from_csv('impacts_observed.csv', index_col=[0])\n",
 " }\n",
@@ -140,11 +144,22 @@
 "\n",
 "twls = {\n",
 " 'forecasted': {\n",
-" 'foreshore_slope_sto06': df_from_csv('twl_foreshore_slope_sto06.csv',index_col=[0,1]),\n",
-" 'mean_slope_hol86': df_from_csv('twl_mean_slope_hol86.csv',index_col=[0,1]),\n",
-" 'mean_slope_nie91': df_from_csv('twl_mean_slope_nie91.csv',index_col=[0,1]),\n",
-" 'mean_slope_pow18': df_from_csv('twl_mean_slope_pow18.csv',index_col=[0,1]),\n",
-" 'mean_slope_sto06': df_from_csv('twl_mean_slope_sto06.csv',index_col=[0,1]),\n",
+" 'postintertidal_slope_hol86': df_from_csv('twl_postintertidal_slope_hol86.csv', index_col=[0,1]),\n",
+" 'postintertidal_slope_nie91': df_from_csv('twl_postintertidal_slope_nie91.csv', index_col=[0,1]),\n",
+" 'postintertidal_slope_pow18': df_from_csv('twl_postintertidal_slope_pow18.csv', index_col=[0,1]),\n",
+" 'postintertidal_slope_sto06': df_from_csv('twl_postintertidal_slope_sto06.csv', index_col=[0,1]),\n",
+" 'postmean_slope_hol86': df_from_csv('twl_postmean_slope_hol86.csv', index_col=[0,1]),\n",
+" 'postmean_slope_nie91': df_from_csv('twl_postmean_slope_nie91.csv', index_col=[0,1]),\n",
+" 'postmean_slope_pow18': df_from_csv('twl_postmean_slope_pow18.csv', index_col=[0,1]),\n",
+" 'postmean_slope_sto06': df_from_csv('twl_postmean_slope_sto06.csv', index_col=[0,1]),\n",
+" 'preintertidal_slope_hol86': df_from_csv('twl_preintertidal_slope_hol86.csv', index_col=[0,1]),\n",
+" 'preintertidal_slope_nie91': df_from_csv('twl_preintertidal_slope_nie91.csv', index_col=[0,1]),\n",
+" 'preintertidal_slope_pow18': df_from_csv('twl_preintertidal_slope_pow18.csv', index_col=[0,1]),\n",
+" 'preintertidal_slope_sto06': df_from_csv('twl_preintertidal_slope_sto06.csv', index_col=[0,1]),\n",
+" 'premean_slope_hol86': df_from_csv('twl_premean_slope_hol86.csv', index_col=[0,1]),\n",
+" 'premean_slope_nie91': df_from_csv('twl_premean_slope_nie91.csv', index_col=[0,1]),\n",
+" 'premean_slope_pow18': df_from_csv('twl_premean_slope_pow18.csv', index_col=[0,1]),\n",
+" 'premean_slope_sto06': df_from_csv('twl_premean_slope_sto06.csv', index_col=[0,1]),\n",
 " }\n",
 "}\n",
 "print('Done!')"
@@ -152,9 +167,7 @@
 },
 {
 "cell_type": "markdown",
-"metadata": {
-"heading_collapsed": true
-},
+"metadata": {},
 "source": [
 "## Generate longshore plots for each beach"
 ]
@@ -163,11 +176,16 @@
 "cell_type": "code",
 "execution_count": null,
 "metadata": {
-"hidden": true
+"code_folding": []
 },
 "outputs": [],
 "source": [
-"beach = 'NARRA'\n",
+"beaches = list(\n",
+" set([\n",
+" x[:-4] for x in df_profiles.index.get_level_values('site_id').unique()\n",
+" ]))\n",
+"\n",
+"for beach in beaches:\n",
 " \n",
 " df_obs_impacts = impacts['observed'].loc[impacts['observed'].index.str.\n",
 " contains(beach)]\n",
@@ -178,15 +196,19 @@
 "\n",
 " # Convert storm regimes to categorical datatype\n",
 " cat_type = CategoricalDtype(\n",
-" categories=['swash', 'collision', 'overwash', 'inundation'], ordered=True)\n",
+" categories=['swash', 'collision', 'overwash', 'inundation'],\n",
+" ordered=True)\n",
 " df_obs_impacts.storm_regime = df_obs_impacts.storm_regime.astype(cat_type)\n",
 "\n",
 " # Create figure\n",
+" \n",
+" # Determine the height of the figure, based on the number of sites.\n",
+" fig_height = max(6, 0.18 * len(n_sites))\n",
 " f, (ax1, ax2, ax3, ax4, ax5, ax6, ax7, ax8) = plt.subplots(\n",
 " 1,\n",
 " 8,\n",
 " sharey=True,\n",
-" figsize=(18, 8),\n",
+" figsize=(18, fig_height),\n",
 " gridspec_kw={'width_ratios': [4, 4, 2, 2, 2, 2, 2, 2]})\n",
 "\n",
 " # ax1: Impacts\n",
@@ -205,15 +227,15 @@
 "\n",
 " # Plot observed impacts\n",
 " colors = [cmap.get(x) for x in df_obs_impacts.storm_regime]\n",
-"colors = ['#d73027' if c is None else c for c in colors]\n",
+" colors = ['#aaaaaa' if c is None else c for c in colors]\n",
 " ax1.scatter([0 for x in n], n, color=colors, **marker_style)\n",
 "\n",
 " # Plot model impacts\n",
 " for i, model in enumerate(impacts['forecasted']):\n",
 "\n",
 " # Only get model results for this beach\n",
-" df_model = impacts['forecasted'][model].loc[impacts['forecasted'][model].\n",
-" index.str.contains(beach)]\n",
+" df_model = impacts['forecasted'][model].loc[\n",
+" impacts['forecasted'][model].index.str.contains(beach)]\n",
 "\n",
 " # Recast storm regimes as categorical data\n",
 " df_model.storm_regime = df_model.storm_regime.astype(cat_type)\n",
@@ -221,7 +243,18 @@
 " # Assign colors\n",
 " colors = [cmap.get(x) for x in df_model.storm_regime]\n",
 " colors = ['#aaaaaa' if c is None else c for c in colors]\n",
-" ax1.scatter([i + 1 for x in n], n, color=colors, **marker_style)\n",
+"\n",
+" # Only plot markers which are different to the observed storm regime. \n",
+" # This makes it easier to find where model predictions differ\n",
+" y_coords = []\n",
+" for obs_impact, for_impact in zip(df_model.storm_regime,\n",
+" df_obs_impacts.storm_regime):\n",
+" if obs_impact == for_impact:\n",
+" y_coords.append(None)\n",
+" else:\n",
+" y_coords.append(i + 1)\n",
+"\n",
+" ax1.scatter(y_coords, n, color=colors, **marker_style)\n",
 "\n",
 " # Add model names to each impact on x axis\n",
 " ax1.set_xticks(range(len(impacts['forecasted']) + 1))\n",
@@ -262,9 +295,13 @@
 " ax1.legend(\n",
 " handles=legend_elements, loc='lower center', bbox_to_anchor=(0.5, 1.1))\n",
 "\n",
-"# Replace yticks with site_ids\n",
-"yticks = ax1.get_yticks().tolist()\n",
-"yticks = [n_sites[int(y)] if 0 <= y <= len(n_sites) else y for y in yticks]\n",
+" # Replace axis ticks with names of site ids\n",
+" ytick_labels = ax1.get_yticks().tolist()\n",
+" yticks = [\n",
+" n_sites[int(y)] if all([y >= 0, y < len(n_sites)]) else ''\n",
+" for y in ytick_labels\n",
+" ]\n",
+" yticks = [x.replace('_', '\\_') for x in yticks]\n",
 " ax1.set_yticklabels(yticks)\n",
 "\n",
 " # ax2: elevations\n",
@@ -299,21 +336,34 @@
 " # Define colors to cycle through for our R_high\n",
 " ax2.set_prop_cycle(cycler('color', model_colors))\n",
 "\n",
+" # For TWL elevations, Rhigh-Dlow and R2 axis, only plot a few models\n",
+" models_to_plot = [\n",
+" 'premean_slope_hol86',\n",
+" 'premean_slope_sto06',\n",
+" 'preintertidal_slope_hol86',\n",
+" 'preintertidal_slope_sto06',\n",
+" ]\n",
+" models_linewidth = 0.8\n",
+"\n",
 " # Plot R_high values\n",
-"for model in impacts['forecasted']:\n",
+" for model in models_to_plot:\n",
 "\n",
 " # Only get model results for this beach\n",
-" df_model = impacts['forecasted'][model].loc[impacts['forecasted'][model].\n",
-" index.str.contains(beach)]\n",
+" df_model = impacts['forecasted'][model].loc[\n",
+" impacts['forecasted'][model].index.str.contains(beach)]\n",
 "\n",
 " # Recast storm regimes as categorical data\n",
-" ax2.plot(df_model.R_high, n, label=model.replace('_', '\\_'))\n",
+" ax2.plot(\n",
+" df_model.R_high,\n",
+" n,\n",
+" label=model.replace('_', '\\_'),\n",
+" linewidth=models_linewidth)\n",
 "\n",
 " # Set title, legend and labels\n",
 " ax2.set_title('TWL \\& dune\\nelevations')\n",
 " ax2.legend(loc='lower center', bbox_to_anchor=(0.5, 1.1))\n",
 " ax2.set_xlabel('Elevation (m AHD)')\n",
-"\n",
+"# ax2.set_xlim([0, max(df_feats.dune_crest_z)])\n",
 "\n",
 " # ax3: Plot R_high - D_low\n",
 "\n",
@@ -321,43 +371,74 @@
 " ax3.set_prop_cycle(cycler('color', model_colors))\n",
 "\n",
 " # Plot R_high values\n",
-"for model in impacts['forecasted']:\n",
+" for model in models_to_plot:\n",
 "\n",
-" df_model = impacts['forecasted'][model].loc[impacts['forecasted'][model].\n",
-" index.str.contains(beach)]\n",
+" df_model = impacts['forecasted'][model].loc[\n",
+" impacts['forecasted'][model].index.str.contains(beach)]\n",
 " # R_high - D_low\n",
-" ax3.plot(df_model.R_high - df_feats.dune_toe_z, n, label=model.replace('_', '\\_'))\n",
+" ax3.plot(\n",
+" df_model.R_high - df_feats.dune_toe_z,\n",
+" n,\n",
+" label=model.replace('_', '\\_'),\n",
+" linewidth=models_linewidth)\n",
 "\n",
 " ax3.axvline(x=0, color='black', linestyle=':')\n",
 " ax3.set_title('$R_{high}$ - $D_{low}$')\n",
 " ax3.set_xlabel('Height (m)')\n",
-"ax3.set_xlim([-2,2])\n",
-"\n",
-"\n",
+"# ax3.set_xlim([-2, 2])\n",
 "\n",
 " # Define colors to cycle through for our R2\n",
 " ax4.set_prop_cycle(cycler('color', model_colors))\n",
 "\n",
 " # R_high - D_low\n",
-"for model in impacts['forecasted']:\n",
-" df_R2 = impacts['forecasted'][model].merge(twls['forecasted'][model],on=['site_id','datetime'])\n",
+" for model in models_to_plot:\n",
+" df_R2 = impacts['forecasted'][model].merge(\n",
+" twls['forecasted'][model], on=['site_id', 'datetime'], how='left')\n",
 " df_R2 = df_R2.loc[df_R2.index.str.contains(beach)]\n",
-" ax4.plot(df_R2.R2, n, label=model.replace('_', '\\_'))\n",
+" ax4.plot(\n",
+" df_R2.R2,\n",
+" n,\n",
+" label=model.replace('_', '\\_'),\n",
+" linewidth=models_linewidth)\n",
 "\n",
 " ax4.set_title(r'$R_{2\\%}$')\n",
 " ax4.set_xlabel('Height (m)')\n",
+"# ax4.set_xlim([0, 10])\n",
+"\n",
+" # Beach slope\n",
+" slope_colors = [\n",
+" '#bebada',\n",
+" '#bc80bd',\n",
+" '#ffed6f',\n",
+" '#fdb462',\n",
+" ]\n",
+" ax5.set_prop_cycle(cycler('color', slope_colors))\n",
+" slope_models = {\n",
+" 'prestorm mean': 'premean_slope_sto06',\n",
+" 'poststorm mean': 'postmean_slope_sto06',\n",
+" 'prestorm intertidal': 'preintertidal_slope_sto06',\n",
+" 'poststorm intertidal': 'postintertidal_slope_sto06',\n",
+" }\n",
 "\n",
-"# Need to chose a model to extract environmental parameters at maximum R_high time\n",
-"model = 'mean_slope_sto06'\n",
-"df_beach = impacts['forecasted'][model].merge(twls['forecasted'][model], on=['site_id','datetime'])\n",
-"df_beach = df_beach.loc[df_beach.index.str.contains(beach)]\n",
+" for label in slope_models:\n",
+" model = slope_models[label]\n",
+" df_beta = impacts['forecasted'][model].merge(\n",
+" twls['forecasted'][model], on=['site_id', 'datetime'], how='left')\n",
+" df_beta = df_beta.loc[df_beta.index.str.contains(beach)]\n",
+" ax5.plot(df_beta.beta, n, label=label, linewidth=models_linewidth)\n",
 "\n",
-"# Wave height, wave period, beach slope\n",
-"ax5.plot(df_beach.beta, n,color='#4daf4a')\n",
 " ax5.set_title(r'$\\beta$')\n",
-"ax5.set_xlabel('Mean prestorm\\nbeach slope')\n",
-"ax5.set_xlim([0,0.15])\n",
+" ax5.set_xlabel('Beach slope')\n",
+" ax5.legend(loc='lower center', bbox_to_anchor=(0.5, 1.1))\n",
+" # ax5.set_xlim([0, 0.15])\n",
+"\n",
+" # Need to chose a model to extract environmental parameters at maximum R_high time\n",
+" model = 'premean_slope_sto06'\n",
+" df_beach = impacts['forecasted'][model].merge(\n",
+" twls['forecasted'][model], on=['site_id', 'datetime'], how='left')\n",
+" df_beach = df_beach.loc[df_beach.index.str.contains(beach)]\n",
 "\n",
+" # Wave height, wave period\n",
 " ax6.plot(df_beach.Hs0, n, color='#999999')\n",
 " ax6.set_title('$H_{s0}$')\n",
 " ax6.set_xlabel('Sig. wave height (m)')\n",
@@ -373,16 +454,28 @@
 " ax8.set_xlabel('Elevation (m AHD)')\n",
 " ax8.set_xlim([0, 2])\n",
 "\n",
-"\n",
 " plt.tight_layout()\n",
 " f.subplots_adjust(top=0.88)\n",
-"f.suptitle(beach)\n",
+" f.suptitle(beach.replace('_', '\\_'))\n",
+"\n",
+" # Set minor axis ticks on each plot\n",
+" ax1.yaxis.set_minor_locator(MultipleLocator(1))\n",
+" ax1.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
+" ax2.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
+" ax3.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
+" ax4.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
+" ax5.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
+" ax6.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
+" ax7.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
+" ax8.yaxis.grid(True, which='minor', linestyle='--', alpha=0.1)\n",
 "\n",
 " # # Print to figure\n",
 " plt.savefig('07_{}.png'.format(beach), dpi=600, bbox_inches='tight')\n",
 "\n",
-"plt.show()\n",
-"plt.close()"
+"# plt.show()\n",
+" plt.close()\n",
+" print('Done: {}'.format(beach))\n",
+"print('Done!')"
 ]
 },
 {
@@ -393,6 +486,13 @@
 "Use sklearn metrics to generate classification reports for each forecasting model."
 ]
 },
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": []
+},
 {
 "cell_type": "code",
 "execution_count": null,
@@ -407,7 +507,7 @@
 "# Convert storm regimes to categorical datatype\n",
 "cat_type = CategoricalDtype(\n",
 " categories=['swash', 'collision', 'overwash', 'inundation'], ordered=True)\n",
-"df_obs.storm_regime = df_obs_impacts.storm_regime.astype(cat_type)\n",
+"df_obs.storm_regime = df_obs.storm_regime.astype(cat_type)\n",
 "\n",
 "for model in impacts['forecasted']:\n",
 " df_for = impacts['forecasted'][model]\n",

@@ -118,10 +118,22 @@
 "# Note that the forecasted data sets should be in the same order for impacts and twls\n",
 "impacts = {\n",
 " 'forecasted': {\n",
-" 'foreshore_slope_sto06': df_from_csv('impacts_forecasted_foreshore_slope_sto06.csv', index_col=[0]),\n",
-" 'mean_slope_sto06': df_from_csv('impacts_forecasted_mean_slope_sto06.csv', index_col=[0]),\n",
-" 'mean_slope_nie91': df_from_csv('impacts_forecasted_mean_slope_nie91.csv', index_col=[0]),\n",
-" 'mean_slope_hol86': df_from_csv('impacts_forecasted_mean_slope_hol86.csv', index_col=[0]),\n",
+" 'postintertidal_slope_hol86': df_from_csv('impacts_forecasted_postintertidal_slope_hol86.csv', index_col=[0]),\n",
+" 'postintertidal_slope_nie91': df_from_csv('impacts_forecasted_postintertidal_slope_nie91.csv', index_col=[0]),\n",
+" 'postintertidal_slope_pow18': df_from_csv('impacts_forecasted_postintertidal_slope_pow18.csv', index_col=[0]),\n",
+" 'postintertidal_slope_sto06': df_from_csv('impacts_forecasted_postintertidal_slope_sto06.csv', index_col=[0]),\n",
+" 'postmean_slope_hol86': df_from_csv('impacts_forecasted_postmean_slope_hol86.csv', index_col=[0]),\n",
+" 'postmean_slope_nie91': df_from_csv('impacts_forecasted_postmean_slope_nie91.csv', index_col=[0]),\n",
+" 'postmean_slope_pow18': df_from_csv('impacts_forecasted_postmean_slope_pow18.csv', index_col=[0]),\n",
+" 'postmean_slope_sto06': df_from_csv('impacts_forecasted_postmean_slope_sto06.csv', index_col=[0]),\n",
+" 'preintertidal_slope_hol86': df_from_csv('impacts_forecasted_preintertidal_slope_hol86.csv', index_col=[0]),\n",
+" 'preintertidal_slope_nie91': df_from_csv('impacts_forecasted_preintertidal_slope_nie91.csv', index_col=[0]),\n",
+" 'preintertidal_slope_pow18': df_from_csv('impacts_forecasted_preintertidal_slope_pow18.csv', index_col=[0]),\n",
+" 'preintertidal_slope_sto06': df_from_csv('impacts_forecasted_preintertidal_slope_sto06.csv', index_col=[0]),\n",
+" 'premean_slope_hol86': df_from_csv('impacts_forecasted_premean_slope_hol86.csv', index_col=[0]),\n",
+" 'premean_slope_nie91': df_from_csv('impacts_forecasted_premean_slope_nie91.csv', index_col=[0]),\n",
+" 'premean_slope_pow18': df_from_csv('impacts_forecasted_premean_slope_pow18.csv', index_col=[0]),\n",
+" 'premean_slope_sto06': df_from_csv('impacts_forecasted_premean_slope_sto06.csv', index_col=[0]),\n",
 " },\n",
 " 'observed': df_from_csv('impacts_observed.csv', index_col=[0])\n",
 " }\n",
@@ -129,10 +141,22 @@
 "\n",
 "twls = {\n",
 " 'forecasted': {\n",
-" 'foreshore_slope_sto06': df_from_csv('twl_foreshore_slope_sto06.csv', index_col=[0, 1]),\n",
-" 'mean_slope_sto06':df_from_csv('twl_mean_slope_sto06.csv', index_col=[0, 1]),\n",
-" 'mean_slope_nie91':df_from_csv('twl_mean_slope_nie91.csv', index_col=[0, 1]),\n",
-" 'mean_slope_hol86':df_from_csv('twl_mean_slope_hol86.csv', index_col=[0, 1]),\n",
+" 'postintertidal_slope_hol86.csv': df_from_csv('twl_postintertidal_slope_hol86.csv', index_col=[0,1]),\n",
+" 'postintertidal_slope_nie91.csv': df_from_csv('twl_postintertidal_slope_nie91.csv', index_col=[0,1]),\n",
+" 'postintertidal_slope_pow18.csv': df_from_csv('twl_postintertidal_slope_pow18.csv', index_col=[0,1]),\n",
+" 'postintertidal_slope_sto06.csv': df_from_csv('twl_postintertidal_slope_sto06.csv', index_col=[0,1]),\n",
+" 'postmean_slope_hol86.csv': df_from_csv('twl_postmean_slope_hol86.csv', index_col=[0,1]),\n",
+" 'postmean_slope_nie91.csv': df_from_csv('twl_postmean_slope_nie91.csv', index_col=[0,1]),\n",
+" 'postmean_slope_pow18.csv': df_from_csv('twl_postmean_slope_pow18.csv', index_col=[0,1]),\n",
+" 'postmean_slope_sto06.csv': df_from_csv('twl_postmean_slope_sto06.csv', index_col=[0,1]),\n",
+" 'preintertidal_slope_hol86.csv': df_from_csv('twl_preintertidal_slope_hol86.csv', index_col=[0,1]),\n",
+" 'preintertidal_slope_nie91.csv': df_from_csv('twl_preintertidal_slope_nie91.csv', index_col=[0,1]),\n",
+" 'preintertidal_slope_pow18.csv': df_from_csv('twl_preintertidal_slope_pow18.csv', index_col=[0,1]),\n",
+" 'preintertidal_slope_sto06.csv': df_from_csv('twl_preintertidal_slope_sto06.csv', index_col=[0,1]),\n",
+" 'premean_slope_hol86.csv': df_from_csv('twl_premean_slope_hol86.csv', index_col=[0,1]),\n",
+" 'premean_slope_nie91.csv': df_from_csv('twl_premean_slope_nie91.csv', index_col=[0,1]),\n",
+" 'premean_slope_pow18.csv': df_from_csv('twl_premean_slope_pow18.csv', index_col=[0,1]),\n",
+" 'premean_slope_sto06.csv': df_from_csv('twl_premean_slope_sto06.csv', index_col=[0,1]),\n",
 " }\n",
 "}\n",
 "print('Done!')"
