{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "## Investigate \"collision protection volume\" concept" ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T02:45:14.908283Z", "start_time": "2018-12-05T02:45:14.556163Z" } }, "outputs": [], "source": [ "%matplotlib inline\n", "%reload_ext autoreload\n", "%autoreload" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T02:45:34.323928Z", "start_time": "2018-12-05T02:45:14.911088Z" } }, "outputs": [], "source": [ "from IPython.core.debugger import set_trace\n", "\n", "import pandas as pd\n", "import numpy as np\n", "import os\n", "\n", "import plotly\n", "import plotly.graph_objs as go\n", "import plotly.plotly as py\n", "import plotly.tools as tls\n", "import plotly.figure_factory as ff\n", "import plotly.io as pio" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Load data\n", "Load data from the `./data/interim/` folder and parse into `pandas` dataframes." ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T02:45:53.010702Z", "start_time": "2018-12-05T02:45:34.324930Z" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Importing profiles.csv\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\z5189959\\Desktop\\nsw-2016-storm-impact\\.venv\\lib\\site-packages\\numpy\\lib\\arraysetops.py:522: FutureWarning:\n", "\n", "elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison\n", "\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Importing profile_features.csv\n", "Importing impacts_forecasted_foreshore_slope_sto06.csv\n", "Importing impacts_forecasted_mean_slope_sto06.csv\n", "Importing impacts_observed.csv\n", "Importing twl_foreshore_slope_sto06.csv\n", "Importing twl_mean_slope_sto06.csv\n", "Done!\n" ] } ], "source": [ "def df_from_csv(csv, index_col, data_folder='../data/interim'):\n", " print('Importing {}'.format(csv))\n", " return pd.read_csv(os.path.join(data_folder,csv), index_col=index_col)\n", "\n", "df_profiles = df_from_csv('profiles.csv', index_col=[0, 1, 2])\n", "df_profile_features = df_from_csv('profile_features.csv', index_col=[0])\n", "\n", "impacts = {\n", " 'forecasted': {\n", " 'foreshore_slope_sto06': df_from_csv('impacts_forecasted_foreshore_slope_sto06.csv', index_col=[0]),\n", " 'mean_slope_sto06': df_from_csv('impacts_forecasted_mean_slope_sto06.csv', index_col=[0]),\n", " },\n", " 'observed': df_from_csv('impacts_observed.csv', index_col=[0])\n", " }\n", "\n", "twls = {\n", " 'forecasted': {\n", " 'foreshore_slope_sto06': df_from_csv('twl_foreshore_slope_sto06.csv', index_col=[0, 1]),\n", " 'mean_slope_sto06':df_from_csv('twl_mean_slope_sto06.csv', index_col=[0, 1]),\n", " }\n", "}\n", "\n", "print('Done!')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Lets define a function to calculate the \"collision protection volume\" based on prestorm profiles." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Get berm feature functions\n", "Define a couple of functions which are going to help us get features of our berms." 
] }, { "cell_type": "code", "execution_count": 27, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T03:01:56.646213Z", "start_time": "2018-12-05T03:01:56.366466Z" }, "code_folding": [] }, "outputs": [], "source": [ "from shapely.geometry import Point, LineString, Polygon\n", "\n", "\n", "def collision_protection_vol(x, z, d_low_x, d_low_z, lower_z, angle):\n", " # First, get the bounding line strings of our protection volume\n", " lower_line = LineString([Point(min(x), lower_z), Point(max(x), lower_z)])\n", " profile_line = LineString([Point(x_coord, z_coord) for x_coord, z_coord in zip(x, z)\n", " if all([not np.isnan(x_coord), not np.isnan(z_coord)])])\n", " slope_line = LineString([Point(d_low_x, d_low_z),\n", " Point(max(x), d_low_z - max(x) * np.sin(np.deg2rad(angle)))])\n", "\n", " # Work out where our lower line and slope line intersect\n", " lower_profile_intersection = lower_line.intersection(profile_line)\n", " d_protected_intersection = lower_line.intersection(slope_line)\n", "\n", " # Define the perimeter of the protection area\n", " profile_protected = LineString([Point(x_coord, z_coord) for x_coord, z_coord\n", " in zip(profile_line.xy[0], profile_line.xy[1])\n", " if d_low_x < x_coord < lower_profile_intersection.xy[0][0]]\n", " + [lower_profile_intersection]\n", " + [d_protected_intersection]\n", " + [Point(d_low_x, d_low_z)])\n", "\n", " # Convert to polygon and return the area (m3/m)\n", " protection_area_poly = Polygon(profile_protected)\n", " protection_area_vol = protection_area_poly.area\n", " return protection_area_vol\n", "\n", "\n", "def get_berm_width(z, d_low_x):\n", " \"\"\"\n", " Returns the width of the berm, defined by the distance between dune toe to z=0\n", " \"\"\"\n", " x_seaward_limit = z.dropna().tail(1).reset_index().x[0]\n", " return x_seaward_limit - d_low_x\n", "\n", "\n", "\n", "\n", "site_id = 'NARRA0018'\n", "profile_type = 'prestorm'\n", "query = \"site_id == '{}' and profile_type == '{}'\".format(\n", " site_id, profile_type)\n", "prestorm_profile = df_profiles.query(query)\n", "profile_features = df_profile_features.query(query)\n", "\n", "x = prestorm_profile.index.get_level_values('x')\n", "z = prestorm_profile.z\n", "d_low_x = profile_features.dune_toe_x.tolist()[0]\n", "d_low_z = profile_features.dune_toe_z.tolist()[0]\n", "angle = 60 # degrees from the horizontal\n", "lower_z = 0.5 # from mhw\n", "\n", "vol = collision_protection_vol(x, z, d_low_x, d_low_z, lower_z, angle)\n", "berm_width = get_berm_width(z, d_low_x)" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T02:45:54.224110Z", "start_time": "2018-12-05T02:45:54.030142Z" } }, "outputs": [], "source": [ "from datetime import timedelta\n", "\n", "def wl_time(t, wl, z_lower, z_upper):\n", " \"\"\"\n", " Returns the amount of time the water level is between two elevations.\n", " \"\"\"\n", " df_wl = pd.DataFrame.from_records([(t_val, R2_val) for t_val, R2_val in zip(t,R2)], columns=['datetime','wl'])\n", " df_wl.set_index(pd.DatetimeIndex(df_wl['datetime']),inplace=True)\n", " df_wl.drop(columns=['datetime'], inplace=True)\n", " \n", " # Assumes that each record is one hour... 
probably need to check this\n", " hours = len(df_wl.query('{} < wl < {}'.format(z_lower, z_upper)))\n", " return timedelta(hours=hours)\n", "\n", "def wave_power(t, wl, z_lower, z_upper, Hs0, Tp):\n", " \"\"\"\n", " Returns the cumulative wave power when the water level is between two elevations.\n", " \"\"\"\n", " df_wl = pd.DataFrame.from_records([(t_val, wl_val,Hs0_val,Tp_val) for t_val, wl_val,Hs0_val,Tp_val in zip(t,wl,Hs0,Tp)], columns=['datetime','wl', 'Hs0','Tp'])\n", " df_wl.set_index(pd.DatetimeIndex(df_wl['datetime']),inplace=True)\n", " df_wl.drop(columns=['datetime'], inplace=True)\n", " \n", " # Assumes that each record is one hour... probably need to check this\n", " rho = 1025 # kg/m3\n", " g = 9.8 # m/s2\n", " df_wl_times = df_wl.query('{} < wl < {}'.format(z_lower, z_upper))\n", " power = rho * g ** 2 / 64 / np.pi * df_wl_times.Hs0 ** 2 * df_wl_times.Tp\n", " return power.sum()\n", "\n", "df_twl = twls['forecasted']['mean_slope_sto06']\n", "df_twl_site = df_twl.query(\"site_id == '{}'\".format(site_id))\n", "\n", "R2 = df_twl_site.R2.tolist()\n", "t = df_twl_site.index.get_level_values('datetime')\n", "z_lower = 0.5\n", "z_upper = d_low_z\n", "\n", "exposed_time = wl_time(t, R2, z_lower,z_upper)\n", "\n", "Hs0 = df_twl_site.Hs0.tolist()\n", "Tp = df_twl_site.Tp.tolist()\n", "wave_p = wave_power(t, R2, z_lower,z_upper,Hs0, Tp)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T02:45:54.231129Z", "start_time": "2018-12-05T02:45:54.225660Z" } }, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": 57, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T03:37:45.472885Z", "start_time": "2018-12-05T03:37:45.462857Z" } }, "outputs": [ { "data": { "text/plain": [ "0.96" ] }, "execution_count": 57, "metadata": {}, "output_type": "execute_result" } ], "source": [ "def dune_toe_elevation_change(site_id, df_profile_features):\n", " query = \"site_id == '{}'\".format(site_id)\n", " profile_features = df_profile_features.query(query)\n", " prestorm_dune_toe_z = profile_features.query(\"profile_type=='prestorm'\").dune_toe_z.tolist()[0]\n", " poststorm_dune_toe_z = profile_features.query(\"profile_type=='poststorm'\").dune_toe_z.tolist()[0]\n", " return prestorm_dune_toe_z - poststorm_dune_toe_z\n", "\n", "toe_ele_change = dune_toe_elevation_change(\"MANNING0081\", df_profile_features)\n", "toe_ele_change" ] }, { "cell_type": "code", "execution_count": 62, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T03:45:45.203827Z", "start_time": "2018-12-05T03:45:13.608478Z" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "0 of 816\n", "20 of 816\n", "40 of 816\n", "60 of 816\n", "80 of 816\n", "100 of 816\n" ] } ], "source": [ "vols = []\n", "exposed_times = []\n", "toe_ele_changes = []\n", "wave_powers = []\n", "berm_widths = []\n", "swash_vol_changes = []\n", "dune_face_vol_changes = []\n", "site_ids_to_plot = []\n", "\n", "# Get site ids where we observed collision\n", "observed_site_ids = impacts['observed'].query(\"storm_regime=='collision'\").index.get_level_values('site_id').unique()\n", "\n", "# # Get site ids where we forecast swash\n", "# forecasted_site_ids = impacts['forecasted']['mean_slope_sto06'].query(\"storm_regime=='swash'\").index.get_level_values('site_id').unique()\n", "\n", "# site_ids = set(observed_site_ids).intersection(set(forecasted_site_ids))\n", "\n", "site_ids = observed_site_ids\n", "\n", "# Calculate for each site\n", "\n", "for n, site_id in 
enumerate(site_ids):\n", " \n", " if n % 20 == 0:\n", " print('{} of {}'.format(n, len(site_ids)))\n", " \n", " try:\n", " query = \"site_id == '{}' and profile_type == '{}'\".format(site_id, 'prestorm')\n", " prestorm_profile = df_profiles.query(query)\n", " profile_features = df_profile_features.query(query)\n", "\n", " vol = collision_protection_vol(x = prestorm_profile.index.get_level_values('x'),\n", " z = prestorm_profile.z,\n", " d_low_x = profile_features.dune_toe_x.tolist()[0],\n", " d_low_z = profile_features.dune_toe_z.tolist()[0],\n", " lower_z = profile_features.dune_toe_z.tolist()[0] - 2, # from mhw\n", " angle = 60, # degrees from the horizontal\n", " )\n", " \n", " df_twl = twls['forecasted']['mean_slope_sto06']\n", " df_twl_site = df_twl.query(\"site_id == '{}'\".format(site_id))\n", " \n", " berm_width = get_berm_width(z = prestorm_profile.z,\n", " d_low_x = profile_features.dune_toe_x.tolist()[0]) \n", " \n", " exposed_time = wl_time(t = df_twl_site.index.get_level_values('datetime'),\n", " wl = df_twl_site.R2.tolist(),\n", " z_lower = profile_features.dune_toe_z.tolist()[0] -2,\n", " z_upper = profile_features.dune_toe_z.tolist()[0],\n", " )\n", " swash_vol_change = impacts['observed'].query(\"site_id == '{}'\".format(site_id)).swash_vol_change.tolist()[0]\n", " dune_face_vol_change = impacts['observed'].query(\"site_id == '{}'\".format(site_id)).dune_face_vol_change.tolist()[0]\n", " \n", " power = wave_power(t = df_twl_site.index.get_level_values('datetime'),\n", " wl = df_twl_site.R2.tolist(),\n", " z_lower = profile_features.dune_toe_z.tolist()[0] -2,\n", " z_upper = profile_features.dune_toe_z.tolist()[0],\n", " Hs0=df_twl_site.Hs0.tolist(),\n", " Tp=df_twl_site.Tp.tolist())\n", " \n", " toe_ele_change = dune_toe_elevation_change(site_id, df_profile_features)\n", " except Exception:\n", " # Skip sites where the required dune toe features or TWL data are missing\n", " continue\n", " \n", "# print(site_id, toe_ele_change)\n", " vols.append(vol)\n", " exposed_times.append(exposed_time)\n", " toe_ele_changes.append(toe_ele_change)\n", " wave_powers.append(power)\n", " berm_widths.append(berm_width)\n", " swash_vol_changes.append(swash_vol_change)\n", " dune_face_vol_changes.append(dune_face_vol_change)\n", " site_ids_to_plot.append(site_id)\n", " \n", " if n>100:\n", " break\n", "\n", " \n", "\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2018-12-03T03:12:11.598150Z", "start_time": "2018-12-03T03:12:11.590128Z" } }, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": 72, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T05:03:39.147413Z", "start_time": "2018-12-05T05:03:39.070207Z" } }, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "225855bac0d0464d9be74917812c19ac", "version_major": 2, "version_minor": 0 }, "text/plain": [ "FigureWidget({\n", " 'data': [{'marker': {'size': 4},\n", " 'mode': 'markers',\n", " 'text': [-0…" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "trace1 = go.Scatter(\n", " x=berm_widths,\n", " y=dune_face_vol_changes,\n", " text = ['{}
{}'.format(ele, site_id) for ele,site_id in zip(toe_ele_changes,site_ids_to_plot)],\n", " mode='markers',\n", " marker=dict(\n", " size=4,\n", "# color = [-1 if x<0 else 1 for x in toe_ele_changes],\n", "# color = toe_ele_changes,\n", "# color = dune_face_vol_changes,\n", "# color = [x.total_seconds() / 60 / 60 for x in exposed_times],\n", "# colorscale='Viridis',\n", "# showscale=True\n", " ))\n", "\n", "layout = go.Layout(\n", " title='Dune Collision Protection',\n", "# height=300,\n", "# legend=dict(font={'size': 10}),\n", "# margin=dict(t=50, b=50, l=50, r=20),\n", " xaxis=dict(\n", " title='Berm width',\n", " autorange=True,\n", " showgrid=True,\n", " zeroline=True,\n", " showline=True,\n", " ),\n", " yaxis=dict(\n", " title='Dune face vol change',\n", " autorange=True,\n", " showgrid=True,\n", " zeroline=True,\n", " showline=True,\n", " ))\n", "\n", "g_plot = go.FigureWidget(data=[trace1], layout=layout)\n", "g_plot" ] }, { "cell_type": "code", "execution_count": 51, "metadata": { "ExecuteTime": { "end_time": "2018-12-05T03:15:46.517975Z", "start_time": "2018-12-05T03:15:46.512936Z" } }, "outputs": [ { "data": { "text/plain": [ "[64.5799,\n", " 21.0163,\n", " 38.106,\n", " 28.101,\n", " 58.7247,\n", " 33.5534,\n", " 71.1675,\n", " 52.6043,\n", " 50.5765,\n", " 39.9074,\n", " 67.8385,\n", " 43.9043,\n", " 39.8181,\n", " 37.7153,\n", " 20.4454,\n", " 39.7757,\n", " 42.1843,\n", " 33.6152,\n", " 42.9587,\n", " 39.9773,\n", " 35.7835,\n", " 31.2884,\n", " -0.4618,\n", " 31.0094,\n", " 33.3479,\n", " 47.8394,\n", " 32.3566,\n", " 36.5205,\n", " 45.7109,\n", " 16.0687,\n", " 35.4375,\n", " 43.327,\n", " 53.5016,\n", " 31.0357,\n", " 47.6528,\n", " 25.5658,\n", " 41.0514,\n", " 28.1645,\n", " 44.5443,\n", " 42.925,\n", " 33.9535,\n", " 36.2626,\n", " 35.2536]" ] }, "execution_count": 51, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# impacts['observed']\n", "swash_vol_changes" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.7" }, "toc": { "base_numbering": 1, "nav_menu": {}, "number_sections": true, "sideBar": true, "skip_h1_title": false, "title_cell": "Table of Contents", "title_sidebar": "Contents", "toc_cell": false, "toc_position": {}, "toc_section_display": true, "toc_window_display": true }, "varInspector": { "cols": { "lenName": 16, "lenType": 16, "lenVar": 40 }, "kernels_config": { "python": { "delete_cmd_postfix": "", "delete_cmd_prefix": "del ", "library": "var_list.py", "varRefreshCmd": "print(var_dic_list())" }, "r": { "delete_cmd_postfix": ") ", "delete_cmd_prefix": "rm(", "library": "var_list.r", "varRefreshCmd": "cat(var_dic_list()) " } }, "types_to_exclude": [ "module", "function", "builtin_function_or_method", "instance", "_Feature" ], "window_display": false } }, "nbformat": 4, "nbformat_minor": 2 }