Compare commits

..

No commits in common. 'ba9b3244f3c31a8bb1f3015c91fc9386aaf9c7b7' and '86c2e0e763db8a6de9492a9e6737b9e0e2fe36f4' have entirely different histories.

1
.gitignore vendored

@@ -23,4 +23,3 @@ __pycache__/
/.venv/
*.log
*.py.lprof
/.vscode/settings.json

@@ -80,7 +80,7 @@ impacts: ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv ./data/inte
--sites-csv "./data/interim/sites.csv" \
--output-file "./data/interim/tides.csv"
./data/interim/profile_features_crest_toes.csv : ./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv ./data/interim/profiles.csv
./data/interim/profile_features_crest_toes.csv : ./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv
$(PYTHON_CLI) create-crest-toes \
--profile-features-csv "./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv" \
--profiles-csv "./data/interim/profiles.csv" \
@@ -160,7 +160,7 @@ twls: ./data/interim/twl_postintertidal_slope_pow18.csv
--profile-type "prestorm" \
--output-file "./data/interim/twl_premean_slope_nie91.csv"
./data/interim/twl_premean_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv ./data/interim/sites_grain_size.csv
./data/interim/twl_premean_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
$(PYTHON_CLI) create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \
@@ -208,7 +208,7 @@ twls: ./data/interim/twl_postintertidal_slope_pow18.csv
--profile-type "poststorm" \
--output-file "./data/interim/twl_postmean_slope_nie91.csv"
./data/interim/twl_postmean_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv ./data/interim/sites_grain_size.csv
./data/interim/twl_postmean_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
$(PYTHON_CLI) create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \
@@ -256,7 +256,7 @@ twls: ./data/interim/twl_postintertidal_slope_pow18.csv
--profile-type "prestorm" \
--output-file "./data/interim/twl_preintertidal_slope_nie91.csv"
./data/interim/twl_preintertidal_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv ./data/interim/sites_grain_size.csv
./data/interim/twl_preintertidal_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
$(PYTHON_CLI) create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \
@@ -304,7 +304,7 @@ twls: ./data/interim/twl_postintertidal_slope_pow18.csv
--profile-type "poststorm" \
--output-file "./data/interim/twl_postintertidal_slope_nie91.csv"
./data/interim/twl_postintertidal_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv ./data/interim/sites_grain_size.csv
./data/interim/twl_postintertidal_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
$(PYTHON_CLI) create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \

@@ -422,6 +422,6 @@ def create_twl_forecast(
profile_type=profile_type,
)
df_twl.to_csv(output_file, float_format="%.4f")
df_twl.to_csv(output_file)
logger.info("Saved to %s", output_file)
logger.info("Done!")

@@ -64,7 +64,8 @@ def storm_regime(df_forecasted_impacts):
df_forecasted_impacts.dune_toe_z <= df_forecasted_impacts.R_high, "storm_regime"
] = "collision"
df_forecasted_impacts.loc[
(df_forecasted_impacts.dune_crest_z <= df_forecasted_impacts.R_high),
(df_forecasted_impacts.dune_crest_z <= df_forecasted_impacts.R_high)
& (df_forecasted_impacts.R_low <= df_forecasted_impacts.dune_crest_z),
"storm_regime",
] = "overwash"
df_forecasted_impacts.loc[
@@ -83,7 +84,12 @@ def storm_regime(df_forecasted_impacts):
return df_forecasted_impacts
def twl_exceedence_time(df_profile_features, df_forecasted_twl, z_twl_col="R_high"):
def twl_exceedence_time(
df_profile_features,
df_forecasted_twl,
z_twl_col="R_high",
z_exceedence_col="dune_toe_z",
):
"""
Returns a dataframe of number of hours the twl exceeded a certain z elevation.
May need to use this https://stackoverflow.com/a/53656968 if datetimes are not consistent.
@@ -95,13 +101,12 @@ def twl_exceedence_time(df_profile_features, df_forecasted_twl, z_twl_col="R_hig
"""
logger.info("Getting twl exceedence time")
# Get the elevation we want to calculate the time TWL exceedes this level.
# Note it's usually dune toe, but some profiles don't have a dune toe. In these cases, use dune crest value
df_temp = df_profile_features.xs("prestorm", level="profile_type").copy()
df_temp.loc[df_temp.dune_toe_z.isnull(), "dune_toe_z"] = df_temp[
df_temp.dune_toe_z.isnull()
].dune_crest_z
df_dune_toes = df_temp.dune_toe_z.to_frame()
# Get a dataframe of prestorm dune toes organised by site_id
df_dune_toes = (
df_profile_features.query('profile_type=="prestorm"')
.reset_index("profile_type")[z_exceedence_col]
.to_frame()
)
# Merge dune toes into site_id
df_merged = df_forecasted_twl.merge(
@@ -110,10 +115,10 @@ def twl_exceedence_time(df_profile_features, df_forecasted_twl, z_twl_col="R_hig
# Return the sum of hours that twl exceeded the level
return (
(df_merged[z_twl_col] >= df_merged["dune_toe_z"])
(df_merged[z_twl_col] >= df_merged[z_exceedence_col])
.groupby("site_id")
.sum()
.rename("twl_{}_exceedance_hrs".format("dune_toe_z"))
.rename("twl_{}_exceedance_hrs".format(z_exceedence_col))
.to_frame()
)
@@ -136,6 +141,6 @@ def create_forecasted_impacts(profile_features_csv, forecasted_twl_csv, output_f
right_on=["site_id"],
)
df_forecasted_impacts.to_csv(output_file, float_format="%.4f")
df_forecasted_impacts.to_csv(output_file)
logger.info("Saved to %s", output_file)
logger.info("Done!")

@@ -154,16 +154,12 @@ def volume_change(df_profiles, df_profile_features, zone):
if np.isnan(diff_vol):
diff_vol = 0
# Base pct change on diff volume
if diff_vol == 0:
pct_change = 0
else:
pct_change = diff_vol / prestorm_vol * 100
df_vol_changes.loc[site_id, "prestorm_{}_vol".format(zone)] = prestorm_vol
df_vol_changes.loc[site_id, "poststorm_{}_vol".format(zone)] = poststorm_vol
df_vol_changes.loc[site_id, "{}_vol_change".format(zone)] = diff_vol
df_vol_changes.loc[site_id, "{}_pct_change".format(zone)] = pct_change
df_vol_changes.loc[site_id, "{}_pct_change".format(zone)] = (
diff_vol / prestorm_vol * 100
)
return df_vol_changes
@@ -233,6 +229,12 @@ def create_observed_impacts(
profiles_csv, profile_features_crest_toes_csv, raw_profile_features_csv, output_file
):
profiles_csv = "./data/interim/profiles.csv"
profile_features_crest_toes_csv = "./data/interim/profile_features_crest_toes.csv"
raw_profile_features_csv = (
"./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv"
)
logger.info("Creating observed wave impacts")
logger.info("Importing data")
df_profiles = pd.read_csv(profiles_csv, index_col=[0, 1, 2])
@@ -259,8 +261,6 @@ def create_observed_impacts(
df_raw_features = pd.read_csv(raw_profile_features_csv, index_col=[0])
df_observed_impacts = overwrite_impacts(df_observed_impacts, df_raw_features)
# TODO Calculate change in slopes, shoreline and volume
# Save dataframe to csv
df_observed_impacts.to_csv(output_file, float_format="%.4f")

@@ -68,7 +68,7 @@ def parse_crest_toes(df_raw_features, df_profiles):
# Try get the value from the other profile if we return nan or empty dataframe
df_z = df_profile.loc[idx[site_id, param, x_val], :]
if np.isnan(df_z.z):
if df_z.empty:
if param == "prestorm":
new_param = "poststorm"
elif param == "poststorm":
@@ -77,6 +77,16 @@ def parse_crest_toes(df_raw_features, df_profiles):
else:
z_val = df_z.z
# # Try get the value from the other profile if we return nan or empty dataframe
# if df_profile.query(query).empty:
# if param == "prestorm":
# query = query.replace('prestorm', 'poststorm')
# elif param == "poststorm":
# query = query.replace('poststorm', 'prestorm')
# z_val = df_profile.query(query).iloc[0].z
# else:
# z_val = df_profile.query(query).iloc[0].z
# Put results back into merged dataframe
df_crest_toes.loc[(site_id, param), "dune_{}_z".format(loc)] = z_val
@@ -457,8 +467,8 @@ def create_waves_csv(waves_mat, sites_csv, waves_output_file, sites_waves_output
df_waves.set_index(["site_id", "datetime"], inplace=True)
df_waves.sort_index(inplace=True)
df_waves, df_sites_waves = split_site_wave_params(df_waves)
df_waves.to_csv(waves_output_file, float_format="%.4f")
df_sites_waves.to_csv(sites_waves_output_file, float_format="%.4f")
df_waves.to_csv(waves_output_file)
df_sites_waves.to_csv(sites_waves_output_file)
logger.info("Created %s", waves_output_file)
logger.info("Created %s", sites_waves_output_file)
@@ -513,9 +523,9 @@ def create_sites_and_profiles_csv(
df_sites.set_index(["site_id"], inplace=True)
df_sites.sort_index(inplace=True)
df_profiles.to_csv(profiles_output_file, float_format="%.8f")
df_profiles.to_csv(profiles_output_file)
logger.info("Created %s", profiles_output_file)
df_sites.to_csv(sites_output_file, float_format="%.8f")
df_sites.to_csv(sites_output_file, float_format="%.3f")
logger.info("Created %s", sites_output_file)
@@ -532,7 +542,7 @@ def create_tides_csv(tides_mat, sites_csv, output_file):
df_tides = replace_unique_sites(df_tides, df_sites)
df_tides.set_index(["site_id", "datetime"], inplace=True)
df_tides.sort_index(inplace=True)
df_tides.to_csv(output_file, float_format="%.4f")
df_tides.to_csv(output_file)
logger.info("Created %s", output_file)

Loading…
Cancel
Save