Compare commits


11 Commits

Author SHA1 Message Date
Chris Leaman e6bb50c00e Fix bug when calculating R_high lat/lon geojson 6 years ago
Chris Leaman faa843ce21 Fix formatting 6 years ago
Chris Leaman 3443062d85 Update notebook and QGIS file 6 years ago
Chris Leaman 9755810f40 Refactor PYTHON_CLI command 6 years ago
Chris Leaman 2d22734bfa Fix position of R_high in geojson to be closest to dune face 6 years ago
Chris Leaman 3af90601ef Refactor overwriting dune crest/toes and impacts (uses one, central .csv file contained in ./data/raw/profile_features_chris_leaman) 6 years ago
Chris Leaman e1d95a1752 Improve performance by replacing .query with .loc 6 years ago
Chris Leaman 6912c50a49 Use dune crest for mean slope calculation if no dune toe 6 years ago
Chris Leaman c7090a43b9 Change log level to info 6 years ago
Chris Leaman db3c45e12d Add line_profiler to environment 6 years ago
Chris Leaman df3946d15c Ignore line_profiler files 6 years ago

.gitignore (vendored)

@@ -20,4 +20,5 @@ __pycache__/
 *.py[cod]
 *$py.class
 /.venv/
 *.log
+*.py.lprof

@@ -46,149 +46,116 @@ pull-data: ##@data Copies data from data backup directory to ./data/
 # Process data
 .PHONY: process-mat
+# Command for activating our virtual environment and calling the CLI entry point
+PYTHON_CLI = activate ./.venv && python ./src/cli.py
 
 impacts: ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv ./data/interim/impacts_forecasted_mean_slope_sto06.csv ./data/interim/impacts_observed.csv ##@products makes obsered and forecasted impacts
 
-# Calculates beach orientations at each profile
+### Parses raw matfiles
 ./data/raw/processed_shorelines/orientations.mat: ./data/raw/processed_shorelines/profiles.mat
     $(MATLAB_PATH) -nosplash -r "cd $(CURRENT_DIR); run('./src/data/beach_orientations.m'); quit"
 
-# # Produces a .csv of sites where our beach cross-sections are located
-# ./data/interim/sites.csv ./data/interim/profiles.csv: ./data/raw/processed_shorelines/profiles.mat
-#     activate ./.venv && python ./src/data/parse_mat.py create-sites-and-profiles-csv \
-#         --profiles-mat "./data/raw/processed_shorelines/profiles.mat" \
-#         --profiles-output-file "./data/interim/profiles.csv" \
-#         --sites-output-file "./data/interim/sites.csv"
-# Produces a .csv of sites where our beach cross-sections are located
 ./data/interim/sites.csv ./data/interim/profiles.csv: ./data/raw/processed_shorelines/profiles.mat
-    activate ./.venv && python ./src/cli.py create-sites-and-profiles-csv \
+    $(PYTHON_CLI) create-sites-and-profiles-csv \
         --profiles-mat "./data/raw/processed_shorelines/profiles.mat" \
         --profiles-output-file "./data/interim/profiles.csv" \
         --sites-output-file "./data/interim/sites.csv"
 
-# Produces a .csv of waves for each site
 ./data/interim/waves.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/waves.mat
-    activate ./.venv && python ./src/cli.py create-waves-csv \
+    $(PYTHON_CLI) create-waves-csv \
        --waves-mat "./data/raw/processed_shorelines/waves.mat" \
        --sites-csv "./data/interim/sites.csv" \
        --output-file "./data/interim/waves.csv"
 
-# Produces a .csv of tides for each site
 ./data/interim/tides.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/tides.mat
-    activate ./.venv && python ./src/cli.py create-tides-csv \
+    $(PYTHON_CLI) create-tides-csv \
        --tides-mat "./data/raw/processed_shorelines/tides.mat" \
        --sites-csv "./data/interim/sites.csv" \
        --output-file "./data/interim/tides.csv"
 
-# Creates a .shp of our sites to load into QGis
-./data/interim/sites.shp: ./data/interim/sites.csv
-    activate ./.venv && python ./src/cli.py sites-csv-to-shp \
-        --input-csv "./data/interim/sites.csv" \
-        --output-shp "./data/interim/sites.shp"
+./data/interim/profile_features_crest_toes.csv : ./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv
+    $(PYTHON_CLI) create-crest-toes \
+        --profile-features-csv "./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv" \
+        --profiles-csv "./data/interim/profiles.csv" \
+        --output-file "./data/interim/profile_features_crest_toes.csv" \
 
-# # Creates a .csv of our dune toe and crest profile features from .shp file
-# ./data/interim/profile_features.csv: ./data/raw/profile_features/dune_crests.shp ./data/raw/profile_features/dune_toes.shp ./data/interim/sites.csv ./data/interim/profiles.csv
-#     activate ./.venv && python ./src/cli.py create-profile-features \
-#         --dune-crest-shp "./data/raw/profile_features/dune_crests.shp" \
-#         --dune-toe-shp "./data/raw/profile_features/dune_toes.shp" \
-#         --sites-csv "./data/interim/sites.csv" \
-#         --profiles-csv "./data/interim/profiles.csv" \
-#         --output-csv "./data/interim/profile_features.csv"
-# Create a .csv of our dune toe and crest profile features from Tom Beuzen's .mat file
-# Also apply an overwrite of some values, using an excel sheet
-./data/interim/profile_features.csv: ./data/raw/profile_features_tom_beuzen/*.mat ./data/interim/sites.csv
-    activate ./.venv && python ./src/cli.py create-profile-features \
-        --crest-mat "./data/raw/profile_features_tom_beuzen/J16_DuneCrest.mat" \
-        --toe-mat "./data/raw/profile_features_tom_beuzen/J16_DuneToe.mat" \
-        --sites-csv "./data/interim/sites.csv" \
-        --output-file "./data/interim/profile_features.csv" \
-    && python ./src/cli.py apply-profile-features-overwrite \
-        --interim_file "./data/interim/profile_features.csv" \
-        --overwrite_file "./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.xlsx" \
-        --profile_file "./data/interim/profiles.csv"
+### TWLs
 
-# Creates a forecast of twl using sto06 and prestorm time varying prestorm foreshore slope
-./data/interim/twl_foreshore_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
-    activate ./.venv && python ./src/cli.py create-twl-forecast \
+./data/interim/twl_foreshore_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
+    $(PYTHON_CLI) create-twl-forecast \
        --waves-csv "./data/interim/waves.csv" \
        --tides-csv "./data/interim/tides.csv" \
        --profiles-csv "./data/interim/profiles.csv" \
-        --profile-features-csv "./data/interim/profile_features.csv" \
+        --profile-features-csv "./data/interim/profile_features_crest_toes.csv" \
        --runup-function "sto06" \
        --slope "foreshore" \
        --profile-type "prestorm" \
        --output-file "./data/interim/twl_foreshore_slope_sto06.csv"
 
-./data/interim/twl_mean_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
-    activate ./.venv && python ./src/cli.py create-twl-forecast \
+./data/interim/twl_mean_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
+    $(PYTHON_CLI) create-twl-forecast \
        --waves-csv "./data/interim/waves.csv" \
        --tides-csv "./data/interim/tides.csv" \
        --profiles-csv "./data/interim/profiles.csv" \
-        --profile-features-csv "./data/interim/profile_features.csv" \
+        --profile-features-csv "./data/interim/profile_features_crest_toes.csv" \
        --runup-function "sto06" \
        --slope "mean" \
        --profile-type "prestorm" \
        --output-file "./data/interim/twl_mean_slope_sto06.csv"
 
-# ./data/interim/twl_poststorm_mean_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
-#     activate ./.venv && python ./src/cli.py create-twl-forecast \
-#         --waves-csv "./data/interim/waves.csv" \
-#         --tides-csv "./data/interim/tides.csv" \
-#         --profiles-csv "./data/interim/profiles.csv" \
-#         --profile-features-csv "./data/interim/profile_features.csv" \
-#         --runup-function "sto06" \
-#         --slope "mean" \
-#         --profile-type "poststorm" \
-#         --output-file "./data/interim/twl_poststorm_mean_slope_sto06.csv"
+### IMPACTS
 
-./data/interim/impacts_observed.csv: ./data/interim/profiles.csv ./data/interim/profile_features.csv
-    activate ./.venv && python ./src/cli.py create-observed-impacts \
+./data/interim/impacts_observed.csv: ./data/interim/profiles.csv ./data/interim/profile_features_crest_toes.csv ./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv
+    $(PYTHON_CLI) create-observed-impacts \
        --profiles-csv "./data/interim/profiles.csv" \
-        --profile-features-csv "./data/interim/profile_features.csv" \
+        --profile-features-crest-toes-csv "./data/interim/profile_features_crest_toes.csv" \
+        --raw-profile-features-csv "./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv" \
        --output-file "./data/interim/impacts_observed.csv"
 
-./data/interim/impacts_forecasted_mean_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_mean_slope_sto06.csv
-    activate ./.venv && python ./src/cli.py create-forecasted-impacts \
-        --profile-features-csv "./data/interim/profile_features.csv" \
+./data/interim/impacts_forecasted_mean_slope_sto06.csv: ./data/interim/profile_features_crest_toes.csv ./data/interim/twl_mean_slope_sto06.csv
+    $(PYTHON_CLI) create-forecasted-impacts \
+        --profile-features-csv "./data/interim/profile_features_crest_toes.csv" \
        --forecasted-twl-csv "./data/interim/twl_mean_slope_sto06.csv" \
        --output-file "./data/interim/impacts_forecasted_mean_slope_sto06.csv"
 
-./data/interim/impacts_forecasted_foreshore_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_foreshore_slope_sto06.csv
-    activate ./.venv && python ./src/cli.py create-forecasted-impacts \
-        --profile-features-csv "./data/interim/profile_features.csv" \
+./data/interim/impacts_forecasted_foreshore_slope_sto06.csv: ./data/interim/profile_features_crest_toes.csv ./data/interim/twl_foreshore_slope_sto06.csv
+    $(PYTHON_CLI) create-forecasted-impacts \
+        --profile-features-csv "./data/interim/profile_features_crest_toes.csv" \
        --forecasted-twl-csv "./data/interim/twl_foreshore_slope_sto06.csv" \
        --output-file "./data/interim/impacts_forecasted_foreshore_slope_sto06.csv"
 
-# ./data/interim/impacts_forecasted_poststorm_mean_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_foreshore_slope_sto06.csv
-#     activate ./.venv && python ./src/cli.py create-forecasted-impacts \
-#         --profile-features-csv "./data/interim/profile_features.csv" \
-#         --forecasted-twl-csv "./data/interim/twl_poststorm_mean_slope_sto06.csv" \
-#         --output-file "./data/interim/impacts_forecasted_poststorm_mean_slope_sto06.csv"
+### GEOJSONs
+geojsons: ./data/interim/impacts_forecasted_mean_slope_sto06.geojson ./data/interim/impacts_forecasted_mean_slope_sto06_R_high.geojson ./data/interim/profile_features_crest_toes.geojson ./data/interim/sites.geojson
 
-./data/interim/impacts_forecasted_mean_slope_sto06.geojson: ./data/interim/impacts_forecasted_mean_slope_sto06.csv
-    activate ./.venv && python ./src/cli.py impacts-to-geojson \
+./data/interim/impacts_forecasted_mean_slope_sto06.geojson: ./data/interim/impacts_forecasted_mean_slope_sto06.csv ./data/interim/impacts_observed.csv
+    $(PYTHON_CLI) impacts-to-geojson \
        --sites-csv "./data/interim/sites.csv" \
        --observed-impacts-csv "./data/interim/impacts_observed.csv" \
        --forecast-impacts-csv "./data/interim/impacts_forecasted_mean_slope_sto06.csv" \
        --output-geojson "./data/interim/impacts_forecasted_mean_slope_sto06.geojson"
 
 ./data/interim/impacts_forecasted_mean_slope_sto06_R_high.geojson: ./data/interim/impacts_forecasted_mean_slope_sto06.csv
-    activate ./.venv && python ./src/cli.py R-high-to-geojson \
+    $(PYTHON_CLI) r-high-to-geojson \
        --sites-csv "./data/interim/sites.csv" \
-        --profile-csv "./data/interim/profiles.csv" \
-        --impacts-csv "./data/interim/impacts_forecasted_mean_slope_sto06" \
+        --profiles-csv "./data/interim/profiles.csv" \
+        --crest-toes-csv "./data/interim/profile_features_crest_toes.csv" \
+        --impacts-csv "./data/interim/impacts_forecasted_mean_slope_sto06.csv" \
        --output-geojson "./data/interim/impacts_forecasted_mean_slope_sto06_R_high.geojson"
 
-./data/interim/profile_features.geojson: ./data/interim/profile_features.csv
-    activate ./.venv && python ./src/cli.py R-high-to-geojson \
+./data/interim/profile_features_crest_toes.geojson: ./data/interim/profile_features_crest_toes.csv
+    $(PYTHON_CLI) profile-features-crest-toes-to-geojson \
        --sites-csv "./data/interim/sites.csv" \
-        --profile-features-csv "./data/interim/profile_features.csv" \
-        --output-geojson "./data/interim/profile_features.geojson"
+        --profile-features-csv "./data/interim/profile_features_crest_toes.csv" \
+        --output-geojson "./data/interim/profile_features_crest_toes.geojson"
 
 ./data/interim/sites.geojson: ./data/interim/sites.csv
-    activate ./.venv && python ./src/cli.py sites-csv-to-geojson \
+    $(PYTHON_CLI) sites-csv-to-geojson \
        --input-csv "./data/interim/sites.csv" \
        --output-geojson "./data/interim/sites.geojson"

@@ -14,6 +14,7 @@ dependencies:
   - ipython
   - ipywidgets
   - matplotlib
+  - line_profiler
   - nbformat
   - notebook
   - numpy

File diff suppressed because it is too large.

Binary file not shown.

@@ -51,11 +51,22 @@ def forecast_twl(
         how="inner",
     )
 
     df_temp["mhw"] = 0.5
 
+    # When calculating mean slope, we go from the dune toe to mhw. However, in some profiles, the dune toe is not
+    # defined. In these cases, we should go to the dune crest
+    df_temp["top_elevation"] = df_temp["dune_toe_z"]
+    df_temp.loc[df_temp.dune_toe_z.isnull(), "top_elevation"] = df_temp.loc[
+        df_temp.dune_toe_z.isnull(), "dune_crest_z"
+    ]
+    df_temp["top_x"] = df_temp["dune_toe_x"]
+    df_temp.loc[df_temp.dune_toe_x.isnull(), "top_x"] = df_temp.loc[df_temp.dune_toe_x.isnull(), "dune_crest_x"]
+
     with Pool(processes=n_processes) as pool:
         results = pool.starmap(
             mean_slope_for_site_id,
-            [(site_id, df_temp, df_profiles, "dune_toe_z", "dune_toe_x", "mhw") for site_id in site_ids],
+            [(site_id, df_temp, df_profiles, "top_elevation", "top_x", "mhw") for site_id in site_ids],
         )
 
     df_twl["beta"] = pd.concat(results)
 
     # Estimate runup
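
The masked .loc assignment added above is the usual pandas idiom for a column-level fallback, and is equivalent to a fillna across two columns. A minimal sketch with made-up elevations (column names follow the diff; the data is hypothetical):

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({
        "dune_toe_z": [3.2, np.nan, 2.8],   # toe undefined for the second profile
        "dune_crest_z": [6.1, 5.4, 5.9],
    })

    # Use the dune toe where defined, otherwise fall back to the dune crest
    df["top_elevation"] = df["dune_toe_z"]
    df.loc[df.dune_toe_z.isnull(), "top_elevation"] = df.loc[df.dune_toe_z.isnull(), "dune_crest_z"]

    # The same fallback as a one-liner
    assert df["top_elevation"].equals(df["dune_toe_z"].fillna(df["dune_crest_z"]))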
@@ -91,11 +102,12 @@ def mean_slope_for_site_id(
     """
 
     # Get the prestorm beach profile
-    profile = df_profiles.query("site_id =='{}' and profile_type == '{}'".format(site_id, profile_type))
+    profile = df_profiles.loc[(site_id, profile_type)]
     profile_x = profile.index.get_level_values("x").tolist()
     profile_z = profile.z.tolist()
 
-    df_twl_site = df_twl.query("site_id == '{}'".format(site_id))
+    idx = pd.IndexSlice
+    df_twl_site = df_twl.loc[idx[site_id, :], :]
 
     df_beta = df_twl_site.apply(
         lambda row: slope_from_profile(
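
The .query-to-.loc change above (the commit "Improve performance by replacing .query with .loc") trades string-expression filtering for direct index lookups. A rough sketch of why this matters, on a hypothetical frame shaped like df_profiles:

    import numpy as np
    import pandas as pd

    index = pd.MultiIndex.from_product(
        [["site_%04d" % i for i in range(100)], ["prestorm", "poststorm"], range(200)],
        names=["site_id", "profile_type", "x"],
    )
    df = pd.DataFrame({"z": np.random.rand(len(index))}, index=index).sort_index()

    # .query() parses the expression string and evaluates it against every row,
    # which is costly when called once per site inside a worker pool.
    slow = df.query("site_id == 'site_0003' and profile_type == 'prestorm'")

    # .loc on a sorted MultiIndex resolves the labels through the index instead.
    fast = df.loc[("site_0003", "prestorm")]

The speedup comes from skipping the per-call expression parse and the full scan of the frame.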

@@ -148,16 +148,31 @@ def storm_regime(df_observed_impacts):
     return df_observed_impacts
 
 
+def overwrite_impacts(df_observed_impacts, df_raw_features):
+    """
+    Overwrites calculated impacts with impacts manually specified in profile_features file
+    :param df_raw_profile_features:
+    :return:
+    """
+    df_observed_impacts.update(df_raw_features.rename(columns={"observed_storm_regime": "storm_regime"}))
+    return df_observed_impacts
+
+
 @click.command()
 @click.option("--profiles-csv", required=True, help="")
-@click.option("--profile-features-csv", required=True, help="")
+@click.option("--profile-features-crest-toes-csv", required=True, help="")
+@click.option("--raw-profile-features-csv", required=True, help="")
 @click.option("--output-file", required=True, help="")
-def create_observed_impacts(profiles_csv, profile_features_csv, output_file):
+def create_observed_impacts(profiles_csv, profile_features_crest_toes_csv, raw_profile_features_csv, output_file):
+    profiles_csv = "./data/interim/profiles.csv"
+    profile_features_crest_toes_csv = "./data/interim/profile_features_crest_toes.csv"
+    raw_profile_features_csv = "./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv"
+
     logger.info("Creating observed wave impacts")
 
     logger.info("Importing data")
     df_profiles = pd.read_csv(profiles_csv, index_col=[0, 1, 2])
-    df_profile_features = pd.read_csv(profile_features_csv, index_col=[0, 1])
+    df_profile_features = pd.read_csv(profile_features_crest_toes_csv, index_col=[0, 1])
 
     logger.info("Creating new dataframe for observed impacts")
     df_observed_impacts = pd.DataFrame(index=df_profile_features.index.get_level_values("site_id").unique())
@@ -170,6 +185,10 @@ def create_observed_impacts(profiles_csv, profile_features_csv, output_file):
     # Classify regime based on volume changes
     df_observed_impacts = storm_regime(df_observed_impacts)
 
+    # Overwrite storm impacts with manually picked impacts
+    df_raw_features = pd.read_csv(raw_profile_features_csv, index_col=[0])
+    df_observed_impacts = overwrite_impacts(df_observed_impacts, df_raw_features)
+
     # Save dataframe to csv
     df_observed_impacts.to_csv(output_file, float_format="%.4f")
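
The new overwrite_impacts helper relies on DataFrame.update, which aligns on the index and only replaces values where the incoming frame is non-NaN, so sites without a manually picked regime keep their calculated one. A small sketch with invented site IDs:

    import numpy as np
    import pandas as pd

    calculated = pd.DataFrame(
        {"storm_regime": ["swash", "collision", "swash"]},
        index=pd.Index(["site_a", "site_b", "site_c"], name="site_id"),
    )
    manual = pd.DataFrame(
        {"observed_storm_regime": [np.nan, "overwash", np.nan]},
        index=pd.Index(["site_a", "site_b", "site_c"], name="site_id"),
    )

    # Only site_b is overwritten; the NaN rows leave the calculated values alone.
    calculated.update(manual.rename(columns={"observed_storm_regime": "storm_regime"}))
    print(calculated.storm_regime.tolist())  # ['swash', 'overwash', 'swash']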

@@ -10,7 +10,6 @@ import click
 
 import analysis.forecast_twl as forecast_twl
 import analysis.forecasted_storm_impacts as forecasted_storm_impacts
 import analysis.observed_storm_impacts as observed_storm_impacts
-import data.apply_manual_overwrites as apply_manual_overwrites
 import data.csv_to_geojson as csv_to_geojson
 import data.parse_mat as parse_mat
@@ -23,15 +22,14 @@ def cli():
 
 if __name__ == "__main__":
-    cli.add_command(apply_manual_overwrites.apply_profile_features_overwrite)
     cli.add_command(csv_to_geojson.impacts_to_geojson)
-    cli.add_command(csv_to_geojson.profile_features_to_geojson)
+    cli.add_command(csv_to_geojson.profile_features_crest_toes_to_geojson)
     cli.add_command(csv_to_geojson.R_high_to_geojson)
     cli.add_command(csv_to_geojson.sites_csv_to_geojson)
     cli.add_command(forecast_twl.create_twl_forecast)
     cli.add_command(forecasted_storm_impacts.create_forecasted_impacts)
     cli.add_command(observed_storm_impacts.create_observed_impacts)
-    cli.add_command(parse_mat.create_profile_features)
+    cli.add_command(parse_mat.create_crest_toes)
     cli.add_command(parse_mat.create_sites_and_profiles_csv)
     cli.add_command(parse_mat.create_tides_csv)
     cli.add_command(parse_mat.create_waves_csv)

@@ -1,103 +0,0 @@
-"""
-After generating interim data files based on raw data, we may need to overwrite some rows with manual data.
-"""
-
-import pandas as pd
-import numpy as np
-import click
-
-from logs import setup_logging
-
-logger = setup_logging()
-
-
-def overwrite_profile_features(df_interim, df_overwrite, df_profiles, overwrite=True):
-    """
-    Overwrite the interim profile features file with an excel file.
-    :param interim_file: Should be './data/interim/profile_features.csv'
-    :param overwrite_file: Should be './data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv'
-    :param overwrite: Whether or not to overwrite the original interim_file. If false, file will not be written
-    :return:
-    """
-
-    # Merge
-    df_merged = df_interim.merge(df_overwrite, left_index=True, right_index=True, suffixes=["", "_overwrite"])
-
-    # Remove x vals if overwrite file as remove
-    df_merged.loc[df_merged.dune_crest_x_overwrite == "remove", "dune_crest_x"] = np.nan
-    df_merged.loc[df_merged.dune_toe_x_overwrite == "remove", "dune_toe_x"] = np.nan
-
-    # Put in new x vals. Note that a NaN value in the overwrite column, means keep the original value.
-    idx = (df_merged.dune_crest_x_overwrite.notnull()) & (df_merged.dune_crest_x_overwrite != "remove")
-    df_merged.loc[idx, "dune_crest_x"] = df_merged.loc[idx, "dune_crest_x_overwrite"]
-
-    idx = (df_merged.dune_toe_x_overwrite.notnull()) & (df_merged.dune_toe_x_overwrite != "remove")
-    df_merged.loc[idx, "dune_toe_x"] = df_merged.loc[idx, "dune_toe_x_overwrite"]
-
-    # Recalculate z values from x coordinates
-    for site_id in df_merged.index.get_level_values("site_id").unique():
-        logger.info("Overwriting dune crest/toes with manual values: {}".format(site_id))
-
-        # Get profiles
-        df_profile = df_profiles.query('site_id=="{}"'.format(site_id))
-
-        for param in ["prestorm", "poststorm"]:
-            for loc in ["crest", "toe"]:
-
-                # Get x value to find corresponding z value
-                x_val = df_merged.loc[(site_id, param), "dune_{}_x".format(loc)]
-
-                if np.isnan(x_val):
-                    df_merged.loc[(site_id, param), "dune_{}_z".format(loc)] = np.nan
-                    continue
-
-                # Get the corresponding z value for our x value
-                query = 'site_id=="{}" & profile_type=="{}" & x=="{}"'.format(site_id, param, x_val)
-
-                # Try get the value from the other profile if we return nan or empty dataframe
-                if df_profile.query(query).empty:
-                    if param == "prestorm":
-                        query = 'site_id=="{}" & profile_type=="{}" & x=="{}"'.format(site_id, "poststorm", x_val)
-                    elif param == "poststorm":
-                        query = 'site_id=="{}" & profile_type=="{}" & x=="{}"'.format(site_id, "prestorm", x_val)
-                    z_val = df_profile.query(query).iloc[0].z
-                else:
-                    z_val = df_profile.query(query).iloc[0].z
-
-                # Put results back into merged dataframe
-                df_merged.loc[(site_id, param), "dune_{}_z".format(loc)] = z_val
-
-    # Drop columns
-    df_merged = df_merged.drop(columns=["dune_crest_x_overwrite", "dune_toe_x_overwrite", "comment"], errors="ignore")
-
-    # Merge back into interim data frame. Use concat/duplicates since .update will not update nan values
-    df_final = pd.concat([df_merged, df_interim])
-    df_final = df_final[~df_final.index.duplicated(keep="first")]
-    df_final = df_final.sort_index()
-
-    # Write to file
-    return df_final
-
-
-@click.command(short_help="overwrite profile_features with manual excel sheet")
-@click.option("--interim_file", required=True, help="path of profile_features.csv")
-@click.option("--overwrite_file", required=True, help="path of excel file with overwrite data")
-@click.option("--profile_file", required=True, help="path of profiles.csv")
-@click.option("--overwrite/--no-overwrite", default=True)
-def apply_profile_features_overwrite(interim_file, overwrite_file, profile_file, overwrite):
-    logger.info("Overwriting profile features with manual excel file")
-
-    # Load files
-    df_interim = pd.read_csv(interim_file, index_col=[0, 1])
-    df_overwrite = pd.read_excel(overwrite_file)
-    df_profiles = pd.read_csv(profile_file, index_col=[0, 1, 2])
-    if "site_id" in df_overwrite.columns and "profile_type" in df_overwrite.columns:
-        df_overwrite = df_overwrite.set_index(["site_id", "profile_type"])
-
-    # Replace interim values with overwrite values
-    df_interim = overwrite_profile_features(df_interim, df_overwrite, df_profiles, overwrite)
-
-    # Write to csv
-    df_interim.to_csv(interim_file, float_format="%.3f")
-
-    logger.info("Done!")

@@ -37,10 +37,11 @@ def lat_lon_from_profile_x_coord(center_lat_lon, orientation, center_profile_x,
 
 @click.command()
 @click.option("--sites-csv", required=True, help=".csv file to convert")
-@click.option("--profile-csv", required=True, help=".csv file to convert")
+@click.option("--profiles-csv", required=True, help=".csv file to convert")
+@click.option("--crest-toes-csv", required=True, help=".csv file to convert")
 @click.option("--impacts-csv", required=True, help=".csv file to convert")
 @click.option("--output-geojson", required=True, help="where to store .geojson file")
-def R_high_to_geojson(sites_csv, profiles_csv, impacts_csv, output_geojson):
+def R_high_to_geojson(sites_csv, profiles_csv, crest_toes_csv, impacts_csv, output_geojson):
     """
     Converts impact R_high into a lat/lon geojson that we can plot in QGIS
     :param sites_csv:
@@ -49,13 +50,9 @@ def R_high_to_geojson(sites_csv, profiles_csv, impacts_csv, output_geojson):
     :param output_geojson:
     :return:
     """
-    sites_csv = "./data/interim/sites.csv"
-    profiles_csv = "./data/interim/profiles.csv"
-    impacts_csv = "./data/interim/impacts_forecasted_mean_slope_sto06.csv"
-    output_geojson = "./data/interim/R_high_forecasted_mean_slope_sto06.geojson"
-
     df_sites = pd.read_csv(sites_csv, index_col=[0])
     df_profiles = pd.read_csv(profiles_csv, index_col=[0, 1, 2])
+    df_crest_toes = pd.read_csv(crest_toes_csv, index_col=[0, 1])
     df_impacts = pd.read_csv(impacts_csv, index_col=[0])
 
     # Create geojson file
@@ -73,13 +70,15 @@ def R_high_to_geojson(sites_csv, profiles_csv, impacts_csv, output_geojson):
         # Find lat/lon of R_high position
         R_high_z = row["R_high"]
 
-        # Get poststorm profile (or should this be prestorm?)
-        df_profile = df_profiles.query('site_id=="{}" & profile_type=="prestorm"'.format(index))
+        # Get poststorm profile
+        df_profile = df_profiles.loc[(site_id, "prestorm")]
         int_x = crossings(df_profile.index.get_level_values("x").tolist(), df_profile.z.tolist(), R_high_z)
 
-        # Take most landward interesection. Continue to next site if there is no intersection
+        # Take the intersection closest to the dune face.
         try:
-            int_x = max(int_x)
+            x_cols = [x for x in df_crest_toes.columns if '_x' in x]
+            dune_face_x = np.mean(df_crest_toes.loc[(site_id, "prestorm"), x_cols].tolist())
+            int_x = min(int_x, key=lambda x: abs(x - dune_face_x))
         except:
             continue
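
The selection change above matches the commit "Fix position of R_high in geojson to be closest to dune face": where a profile crosses the R_high elevation several times, the old code kept the most landward crossing (max), while the new code keeps the crossing nearest a representative dune-face x coordinate, taken as the mean of the crest and toe positions. A toy illustration (values are hypothetical; crossings() is the repository's own helper):

    # x locations where a profile crosses the R_high elevation, in metres
    int_x = [12.0, 48.5, 61.2]

    dune_crest_x, dune_toe_x = 40.0, 55.0
    dune_face_x = (dune_crest_x + dune_toe_x) / 2  # 47.5

    assert max(int_x) == 61.2                                      # old: most landward
    assert min(int_x, key=lambda x: abs(x - dune_face_x)) == 48.5  # new: nearest dune face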
@@ -100,7 +99,7 @@ def R_high_to_geojson(sites_csv, profiles_csv, impacts_csv, output_geojson):
 @click.option("--sites-csv", required=True, help=".csv file to convert")
 @click.option("--profile-features-csv", required=True, help=".csv file to convert")
 @click.option("--output-geojson", required=True, help="where to store .geojson file")
-def profile_features_to_geojson(sites_csv, profile_features_csv, output_geojson):
+def profile_features_crest_toes_to_geojson(sites_csv, profile_features_csv, output_geojson):
     """
     Converts profile_features containing dune toes and crest locations to a geojson we can load into QGIS
     :param sites_csv:

@@ -17,32 +17,69 @@ from logs import setup_logging
 
 logger = setup_logging()
 
 
-def parse_orientations(orientations_mat):
+def parse_crest_toes(df_raw_features, df_profiles):
     """
-    Parses the raw orientations.mat file and returns a pandas dataframe. Note that orientations are the direction
-    towards land measured in degrees anti-clockwise from east.
-    :param orientations_mat:
+    Parses profile_features_chris_leaman.csv
+    :param profile_features_csv:
     :return:
     """
-    logger.info("Parsing %s", orientations_mat)
-    mat_data = loadmat(orientations_mat)["output"]
-    rows = []
-    for i in range(0, len(mat_data["beach"])):
-        rows.append(
-            {
-                "beach": mat_data["beach"][i],
-                "orientation": mat_data["orientation"][i],
-                "lat_center": mat_data["lat_center"][i],
-                "lon_center": mat_data["lon_center"][i],
-                "lat_land": mat_data["lat_land"][i],
-                "lon_land": mat_data["lon_land"][i],
-                "lat_sea": mat_data["lat_sea"][i],
-                "lon_sea": mat_data["lon_sea"][i],
-            }
-        )
-
-    df = pd.DataFrame(rows)
-    return df
+    # Puts profiles_features_csv into format expected by rest of analysis
+    df_crest_toes = df_raw_features.reset_index().melt(
+        id_vars=["site_id"],
+        value_vars=["prestorm_dune_crest_x", "prestorm_dune_toe_x", "poststorm_dune_crest_x", "poststorm_dune_toe_x"],
+    )
+    df_crest_toes["profile_type"] = df_crest_toes.variable.str.extract(r"(prestorm|poststorm)")
+    df_crest_toes["point_type"] = df_crest_toes.variable.str.extract(r"(dune_crest_x|dune_toe_x)")
+    df_crest_toes = df_crest_toes.drop(columns=["variable"])
+    df_crest_toes = df_crest_toes.sort_values("site_id")
+    df_crest_toes = df_crest_toes.set_index(["site_id", "profile_type", "point_type"])
+    df_crest_toes = df_crest_toes.unstack()
+    df_crest_toes.columns = df_crest_toes.columns.droplevel()
+
+    # Now let's calculate the corresponding z elevations for each of our x coordinates
+    for site_id in df_crest_toes.index.get_level_values("site_id").unique():
+        logger.info("Calculating dune toe/crest z elevations for {}".format(site_id))
+
+        # Get profile for this site
+        idx = pd.IndexSlice
+        df_profile = df_profiles.loc[idx[site_id, :, :], :]
+
+        for param in ["prestorm", "poststorm"]:
+            for loc in ["crest", "toe"]:
+
+                # Get x value to find corresponding z value
+                x_val = df_crest_toes.loc[(site_id, param), "dune_{}_x".format(loc)]
+
+                if np.isnan(x_val):
+                    df_crest_toes.loc[(site_id, param), "dune_{}_z".format(loc)] = np.nan
+                    continue
+
+                # Try get the value from the other profile if we return nan or empty dataframe
+                df_z = df_profile.loc[idx[site_id, param, x_val], :]
+                if df_z.empty:
+                    if param == "prestorm":
+                        new_param = "poststorm"
+                    elif param == "poststorm":
+                        new_param = "prestorm"
+                    z_val = df_profile.loc[idx[site_id, new_param, x_val], :].z
+                else:
+                    z_val = df_z.z
+
+                # # Try get the value from the other profile if we return nan or empty dataframe
+                # if df_profile.query(query).empty:
+                #     if param == "prestorm":
+                #         query = query.replace('prestorm', 'poststorm')
+                #     elif param == "poststorm":
+                #         query = query.replace('poststorm', 'prestorm')
+                #     z_val = df_profile.query(query).iloc[0].z
+                # else:
+                #     z_val = df_profile.query(query).iloc[0].z
+
+                # Put results back into merged dataframe
+                df_crest_toes.loc[(site_id, param), "dune_{}_z".format(loc)] = z_val
+
+    return df_crest_toes
 
 
 def parse_dune_crest_toes(df_sites, crest_mat, toe_mat):
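
The reshaping at the top of the new parse_crest_toes turns one wide row per site into one row per (site_id, profile_type) with dune_crest_x/dune_toe_x columns. A self-contained sketch of the same melt/extract/unstack round trip, using a made-up site:

    import pandas as pd

    df_raw = pd.DataFrame({
        "site_id": ["AVOCAn0001"],
        "prestorm_dune_crest_x": [180.0],
        "prestorm_dune_toe_x": [195.0],
        "poststorm_dune_crest_x": [181.0],
        "poststorm_dune_toe_x": [197.0],
    })

    df = df_raw.melt(id_vars=["site_id"])  # long format: one row per measurement
    df["profile_type"] = df.variable.str.extract(r"(prestorm|poststorm)")
    df["point_type"] = df.variable.str.extract(r"(dune_crest_x|dune_toe_x)")
    df = df.drop(columns=["variable"])

    # Pivot point_type back out into columns: one row per (site_id, profile_type)
    df = df.set_index(["site_id", "profile_type", "point_type"]).unstack()
    df.columns = df.columns.droplevel()  # drop the leftover "value" level
    print(df)
    #                          dune_crest_x  dune_toe_x
    # site_id    profile_type
    # AVOCAn0001 poststorm            181.0       197.0
    #            prestorm             180.0       195.0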
@@ -93,40 +130,6 @@ def parse_dune_crest_toes(df_sites, crest_mat, toe_mat):
     return df_profile_features
 
 
-def combine_sites_and_orientaions(df_sites, df_orientations):
-    """
-    Replaces beach/lat/lon columns with the unique site_id.
-    :param dfs:
-    :param df_sites:
-    :return:
-    """
-    df_merged_sites = df_sites.merge(
-        df_orientations[["beach", "lat_center", "lon_center", "orientation"]],
-        left_on=["beach", "lat", "lon"],
-        right_on=["beach", "lat_center", "lon_center"],
-    )
-
-    # Check that all our records have a unique site identifier
-    n_unmatched = len(df_sites) - len(df_merged_sites)
-    if n_unmatched > 0:
-        logger.warning("Not all records (%d of %d) matched with an orientation", n_unmatched, len(df_sites))
-
-    # Drop extra columns
-    df_merged_sites = df_merged_sites.drop(columns=["lat_center", "lon_center"])
-
-    return df_merged_sites
-
-
-def specify_lat_lon_profile_center(df_sites, x_val=200):
-    """
-    Specify which x-coordinate in the beach profile cross section the lat/lon corresponds to
-    :param df_sites:
-    :return:
-    """
-    df_sites["profile_x_lat_lon"] = x_val
-    return df_sites
-
-
 def parse_waves(waves_mat):
     """
     Parses the raw waves.mat file and returns a pandas dataframe
@@ -403,16 +406,31 @@ def create_waves_csv(waves_mat, sites_csv, output_file):
     logger.info("Created %s", output_file)
 
 
+# @click.command(short_help="create profile_features.csv")
+# @click.option("--crest-mat", required=True, help=".mat file containing wave records")
+# @click.option("--toe-mat", required=True, help=".mat file containing wave records")
+# @click.option("--sites-csv", required=True, help=".csv file description of cross section sites")
+# @click.option("--output-file", required=True, help="where to save waves.csv")
+# def create_profile_features(crest_mat, toe_mat, sites_csv, output_file):
+#     logger.info("Creating %s", output_file)
+#     df_sites = pd.read_csv(sites_csv, index_col=[0])
+#     df_profile_features = parse_dune_crest_toes(df_sites, crest_mat, toe_mat)
+#     df_profile_features.to_csv(output_file)
+#     logger.info("Created %s", output_file)
+
+
 @click.command(short_help="create profile_features.csv")
-@click.option("--crest-mat", required=True, help=".mat file containing wave records")
-@click.option("--toe-mat", required=True, help=".mat file containing wave records")
-@click.option("--sites-csv", required=True, help=".csv file description of cross section sites")
+@click.option("--profile-features-csv", required=True, help=".mat file containing wave records")
+@click.option("--profiles-csv", required=True, help=".mat file containing wave records")
 @click.option("--output-file", required=True, help="where to save waves.csv")
-def create_profile_features(crest_mat, toe_mat, sites_csv, output_file):
+def create_crest_toes(profile_features_csv, profiles_csv, output_file):
     logger.info("Creating %s", output_file)
-    df_sites = pd.read_csv(sites_csv, index_col=[0])
-    df_profile_features = parse_dune_crest_toes(df_sites, crest_mat, toe_mat)
-    df_profile_features.to_csv(output_file)
+
+    df_raw_features = pd.read_csv(profile_features_csv, index_col=[0])
+    df_profiles = pd.read_csv(profiles_csv, index_col=[0, 1, 2])
+    df_crest_toes = parse_crest_toes(df_raw_features, df_profiles)
+    df_crest_toes.to_csv(output_file, float_format="%.3f")
+
     logger.info("Created %s", output_file)
@@ -432,7 +450,7 @@ def create_sites_and_profiles_csv(profiles_mat, profiles_output_file, sites_outp
     df_profiles.to_csv(profiles_output_file)
     logger.info("Created %s", profiles_output_file)
 
-    df_sites.to_csv(sites_output_file)
+    df_sites.to_csv(sites_output_file, float_format="%.3f")
     logger.info("Created %s", sites_output_file)

@@ -1,6 +1,6 @@
 ---
 version: 1
-disable_existing_loggers: True
+disable_existing_loggers: False
 formatters:
   simple:
     format: "[%(asctime)s] [%(filename)15.15s:%(lineno)4.4s %(funcName)15.15s] [%(levelname)-4.4s] %(message)s"
@@ -9,7 +9,7 @@ formatters:
 handlers:
   console:
     class: logging.StreamHandler
-    level: DEBUG
+    level: INFO
     formatter: simple
     stream: ext://sys.stdout
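
Both YAML changes affect how the config behaves when loaded: the INFO handler level matches the "Change log level to info" commit, and disable_existing_loggers: False keeps loggers created before configuration (such as module-level loggers from a setup_logging helper) from being silently disabled. A minimal sketch of loading such a file, assuming PyYAML and a hypothetical config path:

    import logging
    import logging.config

    import yaml

    # A logger created before configuration, as module-level loggers are
    logger = logging.getLogger("analysis.forecast_twl")

    with open("./src/logging.yaml") as f:  # hypothetical location of the config above
        logging.config.dictConfig(yaml.safe_load(f))

    # With disable_existing_loggers: True this logger would have been disabled;
    # with False it keeps working, and the console handler filters below INFO.
    logger.info("visible")
    logger.debug("suppressed by the INFO handler level")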
