Compare commits
12 Commits: 6c5444fc06 ... 3f2cfa50aa

SHA1 | Date |
---|---|
3f2cfa50aa | 6 years ago |
614a55a44f | 6 years ago |
20709c6f8f | 6 years ago |
8d98eda308 | 6 years ago |
daad9c5b82 | 6 years ago |
4b9e69f2f0 | 6 years ago |
12070a1acf | 6 years ago |
91a0d8327e | 6 years ago |
b7704c2d35 | 6 years ago |
65f5f2b2c0 | 6 years ago |
947d2093bd | 6 years ago |
44e03892f3 | 6 years ago |
@@ -1,161 +1,39 @@
-name: C:\Users\z5189959\Desktop\nsw-2016-storm-impact\.venv
 channels:
-  - plotly
-  - defaults
   - conda-forge
+  - plotly
 dependencies:
-  - black=18.9b0=py_0
-  - colorlover=0.2.1=py_0
-  - jupyter_contrib_core=0.3.3=py_2
-  - jupyter_contrib_nbextensions=0.5.0=py36_1000
-  - jupyter_highlight_selected_word=0.2.0=py36_1000
-  - jupyter_latex_envs=1.4.4=py36_1000
-  - jupyter_nbextensions_configurator=0.4.0=py36_1000
-  - matplotlib-base=3.0.2=py36h3e3dc42_1001
-  - appdirs=1.4.3=py36h28b3542_0
-  - asn1crypto=0.24.0=py36_0
-  - attrs=18.2.0=py36h28b3542_0
-  - autopep8=1.4.3=py36_0
-  - backcall=0.1.0=py36_0
-  - blas=1.0=mkl
-  - bleach=3.0.2=py36_0
-  - boost=1.67.0=py36_4
-  - boost-cpp=1.67.0=hfa6e2cd_4
-  - ca-certificates=2018.03.07=0
-  - certifi=2018.10.15=py36_0
-  - cffi=1.11.5=py36h74b6da3_1
-  - chardet=3.0.4=py36_1
-  - click=7.0=py36_0
-  - click-plugins=1.0.4=py36_0
-  - cligj=0.5.0=py36_0
-  - colorama=0.4.0=py36_0
-  - cryptography=2.3.1=py36h74b6da3_0
-  - curl=7.62.0=h2a8f88b_0
-  - cycler=0.10.0=py36h009560c_0
-  - decorator=4.3.0=py36_0
-  - entrypoints=0.2.3=py36_2
-  - expat=2.2.5=he025d50_0
-  - fiona=1.7.10=py36h5bf8d1d_0
-  - freetype=2.9.1=ha9979f8_1
-  - freexl=1.0.5=hfa6e2cd_0
-  - gdal=2.2.2=py36hcebd033_1
-  - geos=3.6.2=h9ef7328_2
-  - geotiff=1.4.2=hd5bfa41_0
-  - hdf4=4.2.13=h712560f_2
-  - hdf5=1.10.1=h98b8871_1
-  - icc_rt=2017.0.4=h97af966_0
-  - icu=58.2=ha66f8fd_1
-  - idna=2.7=py36_0
-  - intel-openmp=2019.1=144
-  - ipykernel=5.1.0=py36h39e3cac_0
-  - ipython=7.2.0=py36h39e3cac_0
-  - ipython_genutils=0.2.0=py36h3c5d0ee_0
-  - ipywidgets=7.4.2=py36_0
-  - jedi=0.13.1=py36_0
-  - jinja2=2.10=py36_0
-  - jpeg=9b=hb83a4c4_2
-  - jsonschema=2.6.0=py36h7636477_0
-  - jupyter_client=5.2.3=py36_0
-  - jupyter_core=4.4.0=py36_0
-  - kealib=1.4.7=ha5b336b_5
-  - kiwisolver=1.0.1=py36h6538335_0
-  - krb5=1.16.1=h038dc86_6
-  - libboost=1.67.0=hfd51bdf_4
-  - libcurl=7.62.0=h2a8f88b_0
-  - libgdal=2.2.2=h2727f2b_1
-  - libiconv=1.15=h1df5818_7
-  - libkml=1.3.0=he5f2a48_4
-  - libnetcdf=4.4.1.1=h825a56a_8
-  - libpng=1.6.35=h2a8f88b_0
-  - libpq=10.5=h5fe2233_0
-  - libsodium=1.0.16=h9d3ae62_0
-  - libspatialite=4.3.0a=h383548d_18
-  - libssh2=1.8.0=hd619d38_4
-  - libtiff=4.0.9=h36446d0_2
-  - libxml2=2.9.8=hadb2253_1
-  - libxslt=1.1.32=hf6f1972_0
-  - lxml=4.2.5=py36hef2cd61_0
-  - m2w64-gcc-libgfortran=5.3.0=6
-  - m2w64-gcc-libs=5.3.0=7
-  - m2w64-gcc-libs-core=5.3.0=7
-  - m2w64-gmp=6.1.0=2
-  - m2w64-libwinpthread-git=5.0.0.4634.697f757=2
-  - markupsafe=1.1.0=py36he774522_0
-  - matplotlib=3.0.1=py36hc8f65d3_0
-  - mistune=0.8.4=py36he774522_0
-  - mkl=2018.0.3=1
-  - mkl_fft=1.0.6=py36hdbbee80_0
-  - mkl_random=1.0.1=py36h77b88f5_1
-  - msys2-conda-epoch=20160418=1
-  - munch=2.3.2=py36_0
-  - nbconvert=5.3.1=py36_0
-  - nbformat=4.4.0=py36h3a5bc1b_0
-  - notebook=5.7.2=py36_0
-  - numpy=1.15.4=py36ha559c80_0
-  - numpy-base=1.15.4=py36h8128ebf_0
-  - openjpeg=2.3.0=h5ec785f_1
-  - openssl=1.0.2p=hfa6e2cd_0
-  - pandas=0.23.4=py36h830ac7b_0
-  - pandoc=2.2.3.2=0
-  - pandocfilters=1.4.2=py36_1
-  - parso=0.3.1=py36_0
-  - pickleshare=0.7.5=py36_0
-  - pip=18.1=py36_0
-  - plotly=3.4.2=py36h28b3542_0
-  - proj4=4.9.3=hcf24537_7
-  - prometheus_client=0.4.2=py36_0
-  - prompt_toolkit=2.0.7=py36_0
-  - psutil=5.4.8=py36he774522_0
-  - py-boost=1.67.0=py36h8300f20_4
-  - pycodestyle=2.4.0=py36_0
-  - pycparser=2.19=py36_0
-  - pygments=2.2.0=py36hb010967_0
-  - pyopenssl=18.0.0=py36_0
-  - pyparsing=2.3.0=py36_0
-  - pyproj=1.9.5.1=py36_0
-  - pyqt=5.9.2=py36h6538335_2
-  - pysocks=1.6.8=py36_0
-  - python=3.6.7=h33f27b4_1
-  - python-dateutil=2.7.5=py36_0
-  - pytz=2018.7=py36_0
-  - pywinpty=0.5.4=py36_0
-  - pyyaml=3.13=py36hfa6e2cd_0
-  - pyzmq=17.1.2=py36hfa6e2cd_0
-  - qt=5.9.6=vc14h1e9a669_2
-  - requests=2.20.1=py36_0
-  - retrying=1.3.3=py36_2
-  - scikit-learn=0.20.1=py36hb854c30_0
-  - scipy=1.1.0=py36h4f6bf74_1
-  - send2trash=1.5.0=py36_0
-  - setuptools=40.6.2=py36_0
-  - shapely=1.6.4=py36hc90234e_0
-  - sip=4.19.8=py36h6538335_0
-  - six=1.11.0=py36_1
-  - sqlite=3.25.3=he774522_0
-  - terminado=0.8.1=py36_1
-  - testpath=0.4.2=py36_0
-  - tk=8.6.8=hfa6e2cd_0
-  - toml=0.10.0=py36h28b3542_0
-  - tornado=5.1.1=py36hfa6e2cd_0
-  - traitlets=4.3.2=py36h096827d_0
-  - urllib3=1.23=py36_0
-  - vc=14.1=h0510ff6_4
-  - vs2015_runtime=14.15.26706=h3a45250_0
-  - wcwidth=0.1.7=py36h3d5aa90_0
-  - webencodings=0.5.1=py36_1
-  - wheel=0.32.3=py36_0
-  - widgetsnbextension=3.4.2=py36_0
-  - win_inet_pton=1.0.1=py36_1
-  - wincertstore=0.2=py36h7fe50ca_0
-  - winpty=0.4.3=4
-  - xerces-c=3.2.2=ha925a31_0
-  - xz=5.2.4=h2fa13f4_4
-  - yaml=0.1.7=hc54c509_2
-  - yapf=0.25.0=py36_0
-  - zeromq=4.2.5=he025d50_1
-  - zlib=1.2.11=h62dcd97_3
-  - plotly-orca=1.1.1=1
+  - python=3.6
+  - attrs
+  - autopep8
+  - black
+  - click
+  - click-plugins
+  - colorlover
+  - fiona
+  - ipykernel
+  - ipython
+  - ipywidgets
+  - matplotlib
+  - nbformat
+  - notebook
+  - numpy
+  - pandas
+  - pandoc
+  - pip
+  - plotly
+  - plotly-orca
+  - proj4
+  - pyproj
+  - python-dateutil
+  - pytz
+  - pyyaml
+  - requests
+  - scikit-learn
+  - scipy
+  - setuptools
+  - shapely
+  - yaml
+  - yapf
   - pip:
-    - blackcellmagic==0.0.1
-    - mat4py==0.4.1
-prefix: C:\Users\z5189959\Desktop\nsw-2016-storm-impact\.venv
+    - blackcellmagic
+    - mat4py
File diff suppressed because it is too large
Binary file not shown.
@@ -0,0 +1,103 @@
"""
After generating interim data files based on raw data, we may need to overwrite some rows with manual data.
"""

import pandas as pd
import numpy as np
import click
from utils import setup_logging

logger = setup_logging()


def overwrite_profile_features(df_interim, df_overwrite, df_profiles, overwrite=True):
    """
    Overwrite the interim profile features with manually specified values from an excel file.
    :param df_interim: Dataframe read from './data/interim/profile_features.csv'
    :param df_overwrite: Dataframe read from './data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv'
    :param df_profiles: Dataframe of beach profiles, used to recalculate z values
    :param overwrite: Whether or not to overwrite the original interim file. If false, the file will not be written.
    :return: The interim dataframe with manual values applied
    """

    # Merge
    df_merged = df_interim.merge(df_overwrite, left_index=True, right_index=True, suffixes=["", "_overwrite"])

    # Remove x vals where the overwrite file says "remove"
    df_merged.loc[df_merged.dune_crest_x_overwrite == "remove", "dune_crest_x"] = np.nan
    df_merged.loc[df_merged.dune_toe_x_overwrite == "remove", "dune_toe_x"] = np.nan

    # Put in new x vals. Note that a NaN value in the overwrite column means keep the original value.
    idx = (df_merged.dune_crest_x_overwrite.notnull()) & (df_merged.dune_crest_x_overwrite != "remove")
    df_merged.loc[idx, "dune_crest_x"] = df_merged.loc[idx, "dune_crest_x_overwrite"]

    idx = (df_merged.dune_toe_x_overwrite.notnull()) & (df_merged.dune_toe_x_overwrite != "remove")
    df_merged.loc[idx, "dune_toe_x"] = df_merged.loc[idx, "dune_toe_x_overwrite"]

    # Recalculate z values from x coordinates
    for site_id in df_merged.index.get_level_values("site_id").unique():

        logger.info("Overwriting dune crest/toes with manual values: {}".format(site_id))

        # Get profiles
        df_profile = df_profiles.query('site_id=="{}"'.format(site_id))

        for param in ["prestorm", "poststorm"]:
            for loc in ["crest", "toe"]:

                # Get x value to find corresponding z value
                x_val = df_merged.loc[(site_id, param), "dune_{}_x".format(loc)]
                if np.isnan(x_val):
                    df_merged.loc[(site_id, param), "dune_{}_z".format(loc)] = np.nan
                    continue

                # Get the corresponding z value for our x value
                query = 'site_id=="{}" & profile_type=="{}" & x=="{}"'.format(site_id, param, x_val)

                # Try to get the value from the other profile if we return a nan or empty dataframe
                if df_profile.query(query).empty:
                    if param == "prestorm":
                        query = 'site_id=="{}" & profile_type=="{}" & x=="{}"'.format(site_id, "poststorm", x_val)
                    elif param == "poststorm":
                        query = 'site_id=="{}" & profile_type=="{}" & x=="{}"'.format(site_id, "prestorm", x_val)
                    z_val = df_profile.query(query).iloc[0].z
                else:
                    z_val = df_profile.query(query).iloc[0].z

                # Put results back into merged dataframe
                df_merged.loc[(site_id, param), "dune_{}_z".format(loc)] = z_val

    # Drop columns
    df_merged = df_merged.drop(columns=["dune_crest_x_overwrite", "dune_toe_x_overwrite", "comment"], errors="ignore")

    # Merge back into interim data frame. Use concat/duplicates since .update will not update nan values
    df_final = pd.concat([df_merged, df_interim])
    df_final = df_final[~df_final.index.duplicated(keep="first")]
    df_final = df_final.sort_index()

    return df_final


@click.command(short_help="overwrite profile_features with manual excel sheet")
@click.option("--interim_file", required=True, help="path of profile_features.csv")
@click.option("--overwrite_file", required=True, help="path of excel file with overwrite data")
@click.option("--profile_file", required=True, help="path of profiles.csv")
@click.option("--overwrite/--no-overwrite", default=True)
def apply_profile_features_overwrite(interim_file, overwrite_file, profile_file, overwrite):
    logger.info("Overwriting profile features with manual excel file")

    # Load files
    df_interim = pd.read_csv(interim_file, index_col=[0, 1])
    df_overwrite = pd.read_excel(overwrite_file)
    df_profiles = pd.read_csv(profile_file, index_col=[0, 1, 2])
    if "site_id" in df_overwrite.columns and "profile_type" in df_overwrite.columns:
        df_overwrite = df_overwrite.set_index(["site_id", "profile_type"])

    # Replace interim values with overwrite values
    df_interim = overwrite_profile_features(df_interim, df_overwrite, df_profiles, overwrite)

    # Write to csv
    df_interim.to_csv(interim_file, float_format="%.3f")

    logger.info("Done!")
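The concat/drop-duplicates step at the end of overwrite_profile_features is the subtle part: pd.concat stacks the corrected rows on top of the originals, and index.duplicated(keep="first") then discards each original row that has a corrected counterpart. A minimal sketch of that idiom, with made-up site ids and values (not data from this repository):

import pandas as pd

idx = pd.MultiIndex.from_tuples(
    [("site_a", "prestorm"), ("site_b", "prestorm")], names=["site_id", "profile_type"]
)
df_interim = pd.DataFrame({"dune_toe_x": [200.0, 210.0]}, index=idx)

# One manually corrected row: site_a's dune toe moved from x=200 to x=205.
df_merged = pd.DataFrame(
    {"dune_toe_x": [205.0]},
    index=pd.MultiIndex.from_tuples([("site_a", "prestorm")], names=["site_id", "profile_type"]),
)

# Corrected rows come first, so keep="first" drops the shadowed originals.
# .update() is avoided here because it will not write NaN values.
df_final = pd.concat([df_merged, df_interim])
df_final = df_final[~df_final.index.duplicated(keep="first")].sort_index()

print(df_final["dune_toe_x"].to_dict())
# {('site_a', 'prestorm'): 205.0, ('site_b', 'prestorm'): 210.0}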
@@ -1,34 +1,333 @@
 """
 Converts .csv files to .shape files
 """
+import os
+
+import numpy.ma as ma
 import click
 import fiona
+import numpy as np
 import pandas as pd
 from fiona.crs import from_epsg
-from shapely.geometry import Point, mapping
+from shapely.geometry import Point, mapping, LineString
+
+from collections import OrderedDict
+
+from data.parse_shp import convert_coord_systems
 from utils import setup_logging
 
 logger = setup_logging()
 
 
+def R_high_to_geojson(sites_csv, profiles_csv, impacts_csv, output_geojson):
+
+    sites_csv = './data/interim/sites.csv'
+    profiles_csv = './data/interim/profiles.csv'
+    impacts_csv = './data/interim/impacts_forecasted_mean_slope_sto06.csv'
+    output_geojson = './data/interim/R_high_forecasted_mean_slope_sto06.geojson'
+
+    df_sites = pd.read_csv(sites_csv, index_col=[0])
+    df_profiles = pd.read_csv(profiles_csv, index_col=[0, 1, 2])
+    df_impacts = pd.read_csv(impacts_csv, index_col=[0])
+
+    # Create geojson file
+    schema = {
+        'geometry': 'Point',
+        'properties': OrderedDict([
+            ('beach', 'str'),
+            ('site_id', 'str'),
+            ('elevation', 'float'),
+        ])
+    }
+
+    with fiona.open(output_geojson, 'w', driver='GeoJSON', crs=from_epsg(4326), schema=schema) as output:
+        for index, row in df_impacts.iterrows():
+
+            site_id = index
+            beach = index[:-4]
+
+            # Find lat/lon of R_high position
+            R_high_z = row['R_high']
+
+            # Get the prestorm profile (or should this be poststorm?)
+            df_profile = df_profiles.query('site_id=="{}" & profile_type=="prestorm"'.format(index))
+            int_x = crossings(df_profile.index.get_level_values('x').tolist(),
+                              df_profile.z.tolist(),
+                              R_high_z)
+
+            # Take the most landward intersection. Continue to the next site if there is no intersection.
+            try:
+                int_x = max(int_x)
+            except ValueError:
+                continue
+
+            # Get lat/lon of intercept position
+            site = df_sites.loc[site_id]
+            center_profile_x = site["profile_x_lat_lon"]
+            orientation = site["orientation"]
+            center_lat_lon = Point(site['lon'], site['lat'])  # Get lat/lon of center of profile
+            center_xy = convert_coord_systems(center_lat_lon)
+            center_x, center_y = center_xy.xy
+
+            # Calculate xy position of point and convert to lat/lon
+            point_x = center_x + (center_profile_x - int_x) * np.cos(np.deg2rad(orientation))
+            point_y = center_y + (center_profile_x - int_x) * np.sin(np.deg2rad(orientation))
+            point_xy = Point(point_x, point_y)
+            point_lat_lon = convert_coord_systems(point_xy,
+                                                  in_coord_system="EPSG:28356",
+                                                  out_coord_system="EPSG:4326")
+
+            prop = OrderedDict([
+                ('beach', beach),
+                ('site_id', site_id),
+                ('elevation', R_high_z),
+            ])
+            output.write({"geometry": mapping(point_lat_lon), "properties": prop})
+
+
+def crossings(profile_x, profile_z, constant_z):
+    """
+    Finds the x coordinate of a z elevation for a beach profile. Much faster than using shapely to calculate
+    intersections since we are only interested in intersections of a constant value. Will return multiple
+    intersections if found. Used in calculating beach slope.
+    Adapted from https://stackoverflow.com/a/34745789
+    :param profile_x: List of x coordinates for the beach profile section
+    :param profile_z: List of z coordinates for the beach profile section
+    :param constant_z: Float of the elevation to find corresponding x coordinates
+    :return: List of x coordinates which correspond to the constant_z
+    """
+
+    # Remove nans to suppress warning messages
+    valid = ~ma.masked_invalid(profile_z).mask
+    profile_z = np.array(profile_z)[valid]
+    profile_x = np.array(profile_x)[valid]
+
+    # Normalize the 'signal' to zero.
+    # Use np.subtract rather than a list comprehension for performance reasons
+    z = np.subtract(profile_z, constant_z)
+
+    # Find all indices right before any crossing.
+    # TODO Sometimes this can give a runtime warning https://stackoverflow.com/a/36489085
+    indices = np.where(z[:-1] * z[1:] < 0)[0]
+
+    # Use linear interpolation to find intersample crossings.
+    return [profile_x[i] - (profile_x[i] - profile_x[i + 1]) / (z[i] - z[i + 1]) * z[i] for i in indices]
+
+
+@click.command()
+@click.option("--sites-csv", required=True, help=".csv file to convert")
+@click.option("--profile-features-csv", required=True, help=".csv file to convert")
+@click.option("--output-geojson", required=True, help="where to store .geojson file")
+def profile_features_to_geojson(sites_csv, profile_features_csv, output_geojson):
+    """
+    Converts profile_features containing dune toe and crest locations to a geojson we can load into QGIS
+    :param sites_csv:
+    :param profile_features_csv:
+    :param output_geojson:
+    :return:
+    """
+    logger.info("Creating profile features geojson")
+
+    # Read files from interim folder
+    df_sites = pd.read_csv(sites_csv, index_col=[0])
+    df_profile_features = pd.read_csv(profile_features_csv, index_col=[0])
+
+    # Create geojson file
+    schema = {
+        'geometry': 'Point',
+        'properties': OrderedDict([
+            ('beach', 'str'),
+            ('site_id', 'str'),
+            ('point_type', 'str'),  # prestorm_dune_toe, prestorm_dune_crest, poststorm_dune_toe, poststorm_dune_crest
+            ('profile_type', 'str'),
+            ('elevation', 'float'),
+        ])
+    }
+
+    with fiona.open(output_geojson, 'w', driver='GeoJSON', crs=from_epsg(4326), schema=schema) as output:
+        for index, row in df_profile_features.iterrows():
+            beach = index[:-4]
+            site_id = index
+            profile_type = row['profile_type']
+
+            for point_type in ['crest', 'toe']:
+                elevation = row['dune_{}_z'.format(point_type)]
+                x = row['dune_{}_x'.format(point_type)]
+
+                if np.isnan(x):
+                    continue
+
+                # Geojsons need to use 'null' instead of 'nan'
+                if np.isnan(elevation):
+                    elevation = None
+
+                # Convert x position to lat/lon
+                site = df_sites.loc[site_id]
+                center_profile_x = site["profile_x_lat_lon"]
+                orientation = site["orientation"]
+                center_lat_lon = Point(site['lon'], site['lat'])  # Get lat/lon of center of profile
+                center_xy = convert_coord_systems(center_lat_lon)
+                center_x, center_y = center_xy.xy
+
+                # Calculate xy position of point and convert to lat/lon
+                point_x = center_x + (center_profile_x - x) * np.cos(np.deg2rad(orientation))
+                point_y = center_y + (center_profile_x - x) * np.sin(np.deg2rad(orientation))
+                point_xy = Point(point_x, point_y)
+                point_lat_lon = convert_coord_systems(point_xy,
+                                                      in_coord_system="EPSG:28356",
+                                                      out_coord_system="EPSG:4326")
+
+                prop = OrderedDict([
+                    ('beach', beach),
+                    ('site_id', site_id),
+                    ('point_type', point_type),
+                    ('profile_type', profile_type),
+                    ('elevation', elevation),
+                ])
+                output.write({"geometry": mapping(point_lat_lon), "properties": prop})
+
+
 @click.command()
 @click.option("--input-csv", required=True, help=".csv file to convert")
-@click.option("--output-shp", required=True, help="where to store .shp file")
-def sites_csv_to_shp(input_csv, output_shp):
+@click.option("--output-geojson", required=True, help="where to store .geojson file")
+def sites_csv_to_geojson(input_csv, output_geojson):
     """
-    Converts our dataframe of sites to .shp to load in QGis
+    Converts our dataframe of sites to .geojson to load in QGis. Sites are loaded as linestrings of the profile
+    cross-sections
     :param input_csv:
-    :param output_shp:
+    :param output_geojson:
     :return:
     """
-    logger.info("Converting %s to %s", input_csv, output_shp)
+    logger.info("Converting %s to %s", input_csv, output_geojson)
     df_sites = pd.read_csv(input_csv, index_col=[0])
     logger.info(os.environ.get("GDAL_DATA", None))
-    schema = {"geometry": "Point", "properties": {"beach": "str", "site_id": "str"}}
-    with fiona.open(output_shp, "w", crs=from_epsg(4326), driver="ESRI Shapefile", schema=schema) as output:
+
+    schema = {
+        'geometry': 'LineString',
+        'properties': OrderedDict([
+            ('beach', 'str'),
+            ('site_id', 'str'),
+        ])
+    }
+
+    with fiona.open(output_geojson, 'w', driver='GeoJSON', crs=from_epsg(4326), schema=schema) as output:
         for index, row in df_sites.iterrows():
-            point = Point(row["x_200_lon"], row["x_200_lat"])
-            prop = {"beach": row["beach"], "site_id": index}
-            output.write({"geometry": mapping(point), "properties": prop})
+            # Work out where the center of the profile is
+            orientation = row["orientation"]
+            center_profile_x = row["profile_x_lat_lon"]
+            center_lon = row["lon"]
+            center_lat = row["lat"]
+            center_lat_lon = Point(center_lon, center_lat)
+            center_xy = convert_coord_systems(center_lat_lon)
+            center_x, center_y = center_xy.xy
+
+            # Work out where the landward profile limit is
+            land_x = center_x + center_profile_x * np.cos(np.deg2rad(orientation))
+            land_y = center_y + center_profile_x * np.sin(np.deg2rad(orientation))
+            land_xy = Point(land_x, land_y)
+            land_lat_lon = convert_coord_systems(land_xy, in_coord_system="EPSG:28356",
+                                                 out_coord_system="EPSG:4326")
+
+            # Work out where the seaward profile limit is
+            sea_x = center_x - center_profile_x * np.cos(np.deg2rad(orientation))
+            sea_y = center_y - center_profile_x * np.sin(np.deg2rad(orientation))
+            sea_xy = Point(sea_x, sea_y)
+            sea_lat_lon = convert_coord_systems(sea_xy, in_coord_system="EPSG:28356", out_coord_system="EPSG:4326")
+
+            line_string = LineString([land_lat_lon, center_lat_lon, sea_lat_lon])
+            prop = OrderedDict([("beach", row["beach"]),
+                                ("site_id", index)])
+            output.write({"geometry": mapping(line_string), "properties": prop})
 
     logger.info("Done!")
+
+
+@click.command()
+@click.option("--sites-csv", required=True, help="sites.csv file to convert")
+@click.option("--observed-impacts-csv", required=True, help="impacts-observed.csv file to convert")
+@click.option("--forecast-impacts-csv", required=True, help="impacts-forecast.csv file to convert")
+@click.option("--output-geojson", required=True, help="where to store .geojson file")
+def impacts_to_geojson(sites_csv, observed_impacts_csv, forecast_impacts_csv, output_geojson):
+    """
+    Converts impacts observed and forecasted to a geojson for visualization in QGIS
+    :param sites_csv:
+    :param observed_impacts_csv:
+    :param forecast_impacts_csv:
+    :param output_geojson:
+    :return:
+    """
+
+    # Get information from .csv and read into pandas dataframe
+    df_sites = pd.read_csv(sites_csv, index_col=[0])
+    df_observed = pd.read_csv(observed_impacts_csv, index_col=[0])
+    df_forecast = pd.read_csv(forecast_impacts_csv, index_col=[0])
+
+    # Rename columns, so we can distinguish between forecast and observed
+    df_observed = df_observed.rename(columns={'storm_regime': 'observed_storm_regime'})
+    df_forecast = df_forecast.rename(columns={'storm_regime': 'forecast_storm_regime'})
+
+    # Concat into one big dataframe
+    df = pd.concat([df_sites, df_observed, df_forecast], sort=True, axis=1)
+
+    # Make a new column for the accuracy of the forecast, using underpredict/correct/overpredict classes.
+    # One (observed, forecasted, class) tuple for each combination.
+    classes = [('swash', 'collision', 'overpredict'),
+               ('swash', 'swash', 'correct'),
+               ('swash', 'overwash', 'overpredict'),
+               ('collision', 'swash', 'underpredict'),
+               ('collision', 'collision', 'correct'),
+               ('collision', 'overwash', 'overpredict'),
+               ('overwash', 'swash', 'underpredict'),
+               ('overwash', 'collision', 'underpredict'),
+               ('overwash', 'overwash', 'correct')]
+    for c in classes:
+        df.loc[(df.observed_storm_regime == c[0]) & (df.forecast_storm_regime == c[1]), 'forecast_accuracy'] = c[2]
+
+    schema = {
+        'geometry': 'Point',
+        'properties': OrderedDict([
+            ('beach', 'str'),
+            ('site_id', 'str'),
+            ('forecast_storm_regime', 'str'),
+            ('observed_storm_regime', 'str'),
+            ('forecast_accuracy', 'str')
+        ])
+    }
+
+    with fiona.open(output_geojson, 'w', driver='GeoJSON', crs=from_epsg(4326), schema=schema) as output:
+        for index, row in df.iterrows():
+
+            # Locate the marker at the seaward end of the profile to avoid cluttering the coastline.
+            # Work out where the seaward profile limit is.
+            orientation = row["orientation"]
+            center_profile_x = row["profile_x_lat_lon"]
+            center_lon = row["lon"]
+            center_lat = row["lat"]
+            center_lat_lon = Point(center_lon, center_lat)
+            center_xy = convert_coord_systems(center_lat_lon)
+            center_x, center_y = center_xy.xy
+
+            sea_x = center_x - center_profile_x * np.cos(np.deg2rad(orientation))
+            sea_y = center_y - center_profile_x * np.sin(np.deg2rad(orientation))
+            sea_xy = Point(sea_x, sea_y)
+            sea_lat_lon = convert_coord_systems(sea_xy, in_coord_system="EPSG:28356", out_coord_system="EPSG:4326")
+
+            prop = OrderedDict([
+                ('beach', row["beach"]),
+                ('site_id', index),
+                ('forecast_storm_regime', row["forecast_storm_regime"]),
+                ('observed_storm_regime', row["observed_storm_regime"]),
+                ('forecast_accuracy', row["forecast_accuracy"])
+            ])
+            output.write({"geometry": mapping(sea_lat_lon), "properties": prop})
+
+    logger.info("Done!")
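The interpolation in crossings above can be sanity-checked by hand. A self-contained sketch with an invented four-point profile (the NaN filter here uses np.isfinite, equivalent to the masked-array test in the file):

import numpy as np

def crossings(profile_x, profile_z, constant_z):
    # Keep only finite samples (equivalent to the masked_invalid filter above)
    profile_x = np.asarray(profile_x, dtype=float)
    profile_z = np.asarray(profile_z, dtype=float)
    valid = np.isfinite(profile_z)
    profile_x, profile_z = profile_x[valid], profile_z[valid]

    # Shift so the target elevation sits at zero, then find sign changes
    z = profile_z - constant_z
    indices = np.where(z[:-1] * z[1:] < 0)[0]

    # Linearly interpolate within each crossing interval
    return [profile_x[i] - (profile_x[i] - profile_x[i + 1]) / (z[i] - z[i + 1]) * z[i] for i in indices]

# A profile falling from +5 m to -1 m crosses z=0 exactly once, at x=25:
# between x=20 (z=+1) and x=30 (z=-1) the zero sits halfway.
print(crossings([0, 10, 20, 30], [5.0, 3.0, 1.0, -1.0], 0.0))  # [25.0]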
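All three exporters share the same placement trick: a feature known only by its chainage x along a surveyed profile is offset from the profile's centre in projected coordinates (EPSG:28356) using the site orientation, then reprojected to lat/lon for the geojson. A sketch with invented numbers, assuming orientation is in degrees under the same convention as sites.csv:

import numpy as np

# Hypothetical profile center in MGA Zone 56 (EPSG:28356) easting/northing
center_x, center_y = 343000.0, 6260000.0
center_profile_x = 100.0  # chainage of the center along the profile, in metres
orientation = 30.0        # profile orientation in degrees (assumed convention)

# A dune toe at chainage x=60 sits 40 m landward of the center along the profile
x = 60.0
point_x = center_x + (center_profile_x - x) * np.cos(np.deg2rad(orientation))
point_y = center_y + (center_profile_x - x) * np.sin(np.deg2rad(orientation))

print(round(point_x, 1), round(point_y, 1))  # 343034.6 6260020.0
# This xy pair would then go through convert_coord_systems(...) to EPSG:4326.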