From 07046d4686c6206359f3cd42aaa1a432c28bbbbd Mon Sep 17 00:00:00 2001
From: Chris Leaman
Date: Wed, 21 Nov 2018 12:13:44 +1100
Subject: [PATCH] Update CLI commands

---
 Makefile                                 | 48 +++++++++++++++++++--
 src/analysis/forecast_twl.py             | 54 ++++++++++++++++--------
 src/analysis/forecasted_storm_impacts.py | 32 ++++++++++----
 src/analysis/observed_storm_impacts.py   | 41 +++++++++++++++++-
 4 files changed, 145 insertions(+), 30 deletions(-)

diff --git a/Makefile b/Makefile
index 1dbf142..fc7f22b 100644
--- a/Makefile
+++ b/Makefile
@@ -48,21 +48,21 @@ process-mat: ./data/interim/sites.csv ./data/interim/waves.csv ./data/interim/pr
 		--output-file "./data/interim/sites.csv"
 
 # Produces a .csv of waves for each site
-./data/interim/waves.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/waves.mat ./src/data/parse_mat.py
+./data/interim/waves.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/waves.mat
 	pipenv run python ./src/data/parse_mat.py create-waves-csv \
 		--waves-mat "./data/raw/processed_shorelines/waves.mat" \
 		--sites-csv "./data/interim/sites.csv" \
 		--output-file "./data/interim/waves.csv"
 
 # Produces a .csv of profiles for each site
-./data/interim/profiles.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/profiles.mat ./src/data/parse_mat.py
+./data/interim/profiles.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/profiles.mat
 	pipenv run python ./src/data/parse_mat.py create-profiles-csv \
 		--profiles-mat "./data/raw/processed_shorelines/profiles.mat" \
 		--sites-csv "./data/interim/sites.csv" \
 		--output-file "./data/interim/profiles.csv"
 
 # Produces a .csv of tides for each site
-./data/interim/tides.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/tides.mat ./src/data/parse_mat.py
+./data/interim/tides.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/tides.mat
 	pipenv run python ./src/data/parse_mat.py create-tides-csv \
 		--tides-mat "./data/raw/processed_shorelines/tides.mat" \
 		--sites-csv "./data/interim/sites.csv" \
 		--output-file "./data/interim/tides.csv"
@@ -74,6 +74,7 @@ process-mat: ./data/interim/sites.csv ./data/interim/waves.csv ./data/interim/pr
 		--input-csv "./data/interim/sites.csv" \
 		--output-shp "./data/interim/sites.shp"
 
+# Creates a .csv of our dune toe and crest profile features
 ./data/interim/profile_features.csv: ./data/raw/profile_features/dune_crests.shp ./data/raw/profile_features/dune_toes.shp ./data/interim/sites.csv ./data/interim/profiles.csv
 	pipenv run python ./src/data/profile_features.py create-profile-features \
 		--dune-crest-shp "./data/raw/profile_features/dune_crests.shp" \
@@ -82,6 +83,47 @@ process-mat: ./data/interim/sites.csv ./data/interim/waves.csv ./data/interim/pr
 		--profiles-csv "./data/interim/profiles.csv" \
 		--output-csv "./data/interim/profile_features.csv"
 
+# Creates a forecast of TWL using sto06 and a time-varying prestorm foreshore slope
+./data/interim/twl_foreshore_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
+	pipenv run python ./src/analysis/forecast_twl.py create-twl-forecast \
+		--waves-csv "./data/interim/waves.csv" \
+		--tides-csv "./data/interim/tides.csv" \
+		--profiles-csv "./data/interim/profiles.csv" \
+		--profile-features-csv "./data/interim/profile_features.csv" \
+		--runup-function "sto06" \
+		--slope "foreshore" \
+		--output-file "./data/interim/twl_foreshore_slope_sto06.csv"
+
+# Creates a forecast of TWL using sto06 and the mean prestorm foreshore slope
+./data/interim/twl_mean_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
+	pipenv run python ./src/analysis/forecast_twl.py create-twl-forecast \
+		--waves-csv "./data/interim/waves.csv" \
+		--tides-csv "./data/interim/tides.csv" \
+		--profiles-csv "./data/interim/profiles.csv" \
+		--profile-features-csv "./data/interim/profile_features.csv" \
+		--runup-function "sto06" \
+		--slope "mean" \
+		--output-file "./data/interim/twl_mean_slope_sto06.csv"
+
+./data/interim/impacts_observed.csv: ./data/interim/profiles.csv ./data/interim/profile_features.csv
+	pipenv run python ./src/analysis/observed_storm_impacts.py create-observed-impacts \
+		--profiles-csv "./data/interim/profiles.csv" \
+		--profile-features-csv "./data/interim/profile_features.csv" \
+		--output-file "./data/interim/impacts_observed.csv"
+
+./data/interim/impacts_forecasted_mean_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_mean_slope_sto06.csv
+	pipenv run python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
+		--profile-features-csv "./data/interim/profile_features.csv" \
+		--forecasted-twl-csv "./data/interim/twl_mean_slope_sto06.csv" \
+		--output-file "./data/interim/impacts_forecasted_mean_slope_sto06.csv"
+
+./data/interim/impacts_forecasted_foreshore_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_foreshore_slope_sto06.csv
+	pipenv run python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
+		--profile-features-csv "./data/interim/profile_features.csv" \
+		--forecasted-twl-csv "./data/interim/twl_foreshore_slope_sto06.csv" \
+		--output-file "./data/interim/impacts_forecasted_foreshore_slope_sto06.csv"
+
+
 #################################################################################
 # PROJECT RULES                                                                 #
 #################################################################################
diff --git a/src/analysis/forecast_twl.py b/src/analysis/forecast_twl.py
index 2fe1b0a..175c887 100644
--- a/src/analysis/forecast_twl.py
+++ b/src/analysis/forecast_twl.py
@@ -1,13 +1,14 @@
 import logging.config
 import os
 from multiprocessing import Pool
-
+import click
 import numpy as np
 import numpy.ma as ma
 import pandas as pd
 from scipy import stats
-from src.analysis.runup_models import sto06_individual, sto06
+
+from src.analysis import runup_models
 
 logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False)
 logger = logging.getLogger(__name__)
 
@@ -126,7 +127,7 @@ def foreshore_slope_for_site_id(site_id, df_twl, df_profiles):
             profile_x=profile_x,
             profile_z=profile_z,
             tide=row.tide,
-            runup_function=sto06_individual,
+            runup_function=runup_models.sto06_individual,
             Hs0=row.Hs0,
             Tp=row.Tp,
         ),
@@ -266,25 +267,42 @@ def crossings(profile_x, profile_z, constant_z):
     return [profile_x[i] - (profile_x[i] - profile_x[i + 1]) / (z[i] - z[i + 1]) * (z[i]) for i in indicies]
 
 
-if __name__ == "__main__":
+@click.command()
+@click.option("--waves-csv", required=True, help="Path to the input waves.csv")
+@click.option("--tides-csv", required=True, help="Path to the input tides.csv")
+@click.option("--profiles-csv", required=True, help="Path to the input profiles.csv")
+@click.option("--profile-features-csv", required=True, help="Path to the input profile_features.csv")
+@click.option("--runup-function", required=True, help="Runup model to use", type=click.Choice(["sto06"]))
+@click.option("--slope", required=True, help="Beach slope to use in the runup model", type=click.Choice(["foreshore", "mean"]))
+@click.option("--output-file", required=True, help="Where to save the forecasted TWL .csv")
+def create_twl_forecast(waves_csv, tides_csv, profiles_csv, profile_features_csv, runup_function, slope, output_file):
+    logger.info("Creating forecast of total water levels")
     logger.info("Importing data")
-    data_folder = "./data/interim"
-    df_waves = pd.read_csv(os.path.join(data_folder, "waves.csv"), index_col=[0, 1])
-    df_tides = pd.read_csv(os.path.join(data_folder, "tides.csv"), index_col=[0, 1])
-    df_profiles = pd.read_csv(os.path.join(data_folder, "profiles.csv"), index_col=[0, 1, 2])
-    df_sites = pd.read_csv(os.path.join(data_folder, "sites.csv"), index_col=[0])
-    df_profile_features = pd.read_csv(os.path.join(data_folder, "profile_features.csv"), index_col=[0])
+    df_waves = pd.read_csv(waves_csv, index_col=[0, 1])
+    df_tides = pd.read_csv(tides_csv, index_col=[0, 1])
+    df_profiles = pd.read_csv(profiles_csv, index_col=[0, 1, 2])
+    df_profile_features = pd.read_csv(profile_features_csv, index_col=[0])
 
     logger.info("Forecasting TWL")
-    df_twl_foreshore_slope_sto06 = forecast_twl(
-        df_tides, df_profiles, df_waves, df_profile_features, runup_function=sto06, slope="foreshore"
+    df_twl_foreshore_slope_sto06 = forecast_twl(
+        df_tides,
+        df_profiles,
+        df_waves,
+        df_profile_features,
+        runup_function=getattr(runup_models, runup_function),
+        slope=slope,
     )
-    df_twl_foreshore_slope_sto06.to_csv(os.path.join(data_folder, "twl_foreshore_slope_sto06.csv"))
 
-    df_twl_mean_slope_sto06 = forecast_twl(
-        df_tides, df_profiles, df_waves, df_profile_features, runup_function=sto06, slope="mean"
-    )
-    df_twl_mean_slope_sto06.to_csv(os.path.join(data_folder, "twl_mean_slope_sto06.csv"))
+    df_twl_foreshore_slope_sto06.to_csv(output_file)
+    logger.info("Saved to %s", output_file)
+    logger.info("Done!")
 
-    logger.info("Done")
+
+@click.group()
+def cli():
+    pass
+
+
+if __name__ == "__main__":
+    cli.add_command(create_twl_forecast)
+    cli()
diff --git a/src/analysis/forecasted_storm_impacts.py b/src/analysis/forecasted_storm_impacts.py
index 8e4acbd..d156cb1 100644
--- a/src/analysis/forecasted_storm_impacts.py
+++ b/src/analysis/forecasted_storm_impacts.py
@@ -4,7 +4,7 @@ Estimates the forecasted storm impacts based on the forecasted water level and d
 
 import logging.config
 import os
-
+import click
 import pandas as pd
 
 logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False)
@@ -19,7 +19,7 @@ def forecasted_impacts(df_profile_features, df_forecasted_twl):
     :param df_forecasted_twl:
     :return:
     """
-    logger.info("Getting forecasted storm regimes")
+    logger.info("Getting forecasted storm impacts")
 
     df_forecasted_impacts = pd.DataFrame(index=df_profile_features.index)
 
@@ -67,12 +67,28 @@ def storm_regime(df_forecasted_impacts):
     return df_forecasted_impacts
 
 
-if __name__ == "__main__":
+@click.command()
+@click.option("--profile-features-csv", required=True, help="Path to the input profile_features.csv")
+@click.option("--forecasted-twl-csv", required=True, help="Path to the forecasted TWL .csv")
+@click.option("--output-file", required=True, help="Where to save the forecasted impacts .csv")
+def create_forecasted_impacts(profile_features_csv, forecasted_twl_csv, output_file):
+
+    logger.info("Creating forecasted storm impacts")
     logger.info("Importing existing data")
-    data_folder = "./data/interim"
-    df_profiles = pd.read_csv(os.path.join(data_folder, "profiles.csv"), index_col=[0, 1, 2])
-    df_profile_features = pd.read_csv(os.path.join(data_folder, "profile_features.csv"), index_col=[0])
-    df_forecasted_twl = pd.read_csv(os.path.join(data_folder, "twl_mean_slope_sto06.csv"), index_col=[0, 1])
+    df_profile_features = pd.read_csv(profile_features_csv, index_col=[0])
+    df_forecasted_twl = pd.read_csv(forecasted_twl_csv, index_col=[0, 1])
 
     df_forecasted_impacts = forecasted_impacts(df_profile_features, df_forecasted_twl)
-    df_forecasted_impacts.to_csv(os.path.join(data_folder, "impacts_forecasted_mean_slope_sto06.csv"))
+    df_forecasted_impacts.to_csv(output_file)
+    logger.info("Saved to %s", output_file)
+    logger.info("Done!")
+
+
+@click.group()
+def cli():
+    pass
+
+
+if __name__ == "__main__":
+    cli.add_command(create_forecasted_impacts)
+    cli()
diff --git a/src/analysis/observed_storm_impacts.py b/src/analysis/observed_storm_impacts.py
index 7666627..7c12d6a 100644
--- a/src/analysis/observed_storm_impacts.py
+++ b/src/analysis/observed_storm_impacts.py
@@ -1,6 +1,6 @@
 import logging.config
 import os
-
+import click
 import numpy as np
 import pandas as pd
 from scipy.integrate import simps
@@ -143,3 +143,42 @@ if __name__ == "__main__":
 
     # Save dataframe to csv
     df_observed_impacts.to_csv(os.path.join(data_folder, "impacts_observed.csv"))
+
+
+@click.command()
+@click.option("--profiles-csv", required=True, help="Path to the input profiles.csv")
+@click.option("--profile-features-csv", required=True, help="Path to the input profile_features.csv")
+@click.option("--output-file", required=True, help="Where to save the observed impacts .csv")
+def create_observed_impacts(profiles_csv, profile_features_csv, output_file):
+
+    logger.info("Creating observed wave impacts")
+    logger.info("Importing data")
+    df_profiles = pd.read_csv(profiles_csv, index_col=[0, 1, 2])
+    df_profile_features = pd.read_csv(profile_features_csv, index_col=[0])
+
+    logger.info("Creating new dataframe for observed impacts")
+    df_observed_impacts = pd.DataFrame(index=df_profile_features.index)
+
+    logger.info("Getting pre/post storm volumes")
+    df_swash_vol_changes = volume_change(df_profiles, df_profile_features, zone="swash")
+    df_dune_face_vol_changes = volume_change(df_profiles, df_profile_features, zone="dune_face")
+    df_observed_impacts = df_observed_impacts.join([df_swash_vol_changes, df_dune_face_vol_changes])
+
+    # Classify regime based on volume changes
+    df_observed_impacts = storm_regime(df_observed_impacts)
+
+    # Save dataframe to csv
+    df_observed_impacts.to_csv(output_file)
+
+    logger.info("Saved to %s", output_file)
+    logger.info("Done!")
+
+
+@click.group()
+def cli():
+    pass
+
+
+if __name__ == "__main__":
+    cli.add_command(create_observed_impacts)
+    cli()
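
With these rules and commands in place, the pipeline outputs can be rebuilt either through make (which resolves the
dependency chain above) or by calling the new click commands directly. A minimal usage sketch, assuming pipenv is set
up and the raw inputs under ./data/raw/ are present as referenced in the Makefile:

    # Build the observed and forecasted impact .csv files via the Makefile dependency chain
    make ./data/interim/impacts_observed.csv
    make ./data/interim/impacts_forecasted_mean_slope_sto06.csv

    # Or run a single step directly, e.g. the TWL forecast using the mean prestorm slope
    pipenv run python ./src/analysis/forecast_twl.py create-twl-forecast \
        --waves-csv "./data/interim/waves.csv" \
        --tides-csv "./data/interim/tides.csv" \
        --profiles-csv "./data/interim/profiles.csv" \
        --profile-features-csv "./data/interim/profile_features.csv" \
        --runup-function "sto06" \
        --slope "mean" \
        --output-file "./data/interim/twl_mean_slope_sto06.csv"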