Compare commits

..

9 Commits

Author SHA1 Message Date
Chris Leaman ba9b3244f3 Ignore vscode settings
Chris Leaman e1dc57ead6 Fix formatting
Chris Leaman 1774c15e89 Fix finding z elevation of dune toe/crests
If the x-coordinate specified for the toe/crest does not exist for the profile type, this fixes an issue where the z-elevation of the other profile type was not being correctly calculated. Also, some commented out code is removed.
Chris Leaman 16ca2aa482 Fix volume difference percent when difference equal to zero
Chris Leaman bc1778c473 Fix dependency in Makefile
Chris Leaman cdca489e4a Fix dune toe TWL exceedance hours
Dune toe TWL exceedance hours were being left as zero if the profile didn't have a dune toe (technically correct). In these cases it's more useful to calculate the exceedance hours of the dune crest level.
Chris Leaman 1fca122276 Ensure float precision is limited when outputting csv
Without this change, interim .csv files were being recorded with 9 decimal places, greatly increasing the file size. This keeps the number of decimals limited to a practical amount.
Chris Leaman 9e3716480a Fix hard-coded file names in create_observed_impacts
Chris Leaman bf61116efb Remove R_low requirement when forecasting overwash
Some runup parameterizations only give R_high, but not R_low. In these cases, let's just classify the profile as overwash if R_high > D_high.

1
.gitignore vendored

@ -23,3 +23,4 @@ __pycache__/
/.venv/
*.log
*.py.lprof
/.vscode/settings.json

@ -80,7 +80,7 @@ impacts: ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv ./data/inte
--sites-csv "./data/interim/sites.csv" \
--output-file "./data/interim/tides.csv"
./data/interim/profile_features_crest_toes.csv : ./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv
./data/interim/profile_features_crest_toes.csv : ./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv ./data/interim/profiles.csv
$(PYTHON_CLI) create-crest-toes \
--profile-features-csv "./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv" \
--profiles-csv "./data/interim/profiles.csv" \
@ -160,7 +160,7 @@ twls: ./data/interim/twl_postintertidal_slope_pow18.csv
--profile-type "prestorm" \
--output-file "./data/interim/twl_premean_slope_nie91.csv"
./data/interim/twl_premean_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
./data/interim/twl_premean_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv ./data/interim/sites_grain_size.csv
$(PYTHON_CLI) create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \
@ -208,7 +208,7 @@ twls: ./data/interim/twl_postintertidal_slope_pow18.csv
--profile-type "poststorm" \
--output-file "./data/interim/twl_postmean_slope_nie91.csv"
./data/interim/twl_postmean_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
./data/interim/twl_postmean_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv ./data/interim/sites_grain_size.csv
$(PYTHON_CLI) create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \
@ -256,7 +256,7 @@ twls: ./data/interim/twl_postintertidal_slope_pow18.csv
--profile-type "prestorm" \
--output-file "./data/interim/twl_preintertidal_slope_nie91.csv"
./data/interim/twl_preintertidal_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
./data/interim/twl_preintertidal_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv ./data/interim/sites_grain_size.csv
$(PYTHON_CLI) create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \
@ -304,7 +304,7 @@ twls: ./data/interim/twl_postintertidal_slope_pow18.csv
--profile-type "poststorm" \
--output-file "./data/interim/twl_postintertidal_slope_nie91.csv"
./data/interim/twl_postintertidal_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv
./data/interim/twl_postintertidal_slope_pow18.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features_crest_toes.csv ./data/interim/sites_grain_size.csv
$(PYTHON_CLI) create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \

@ -422,6 +422,6 @@ def create_twl_forecast(
profile_type=profile_type,
)
df_twl.to_csv(output_file)
df_twl.to_csv(output_file, float_format="%.4f")
logger.info("Saved to %s", output_file)
logger.info("Done!")

@ -64,8 +64,7 @@ def storm_regime(df_forecasted_impacts):
df_forecasted_impacts.dune_toe_z <= df_forecasted_impacts.R_high, "storm_regime"
] = "collision"
df_forecasted_impacts.loc[
(df_forecasted_impacts.dune_crest_z <= df_forecasted_impacts.R_high)
& (df_forecasted_impacts.R_low <= df_forecasted_impacts.dune_crest_z),
(df_forecasted_impacts.dune_crest_z <= df_forecasted_impacts.R_high),
"storm_regime",
] = "overwash"
df_forecasted_impacts.loc[
@ -84,12 +83,7 @@ def storm_regime(df_forecasted_impacts):
return df_forecasted_impacts
def twl_exceedence_time(
df_profile_features,
df_forecasted_twl,
z_twl_col="R_high",
z_exceedence_col="dune_toe_z",
):
def twl_exceedence_time(df_profile_features, df_forecasted_twl, z_twl_col="R_high"):
"""
Returns a dataframe of number of hours the twl exceeded a certain z elevation.
May need to use this https://stackoverflow.com/a/53656968 if datetimes are not consistent.
@ -101,12 +95,13 @@ def twl_exceedence_time(
"""
logger.info("Getting twl exceedence time")
# Get a dataframe of prestorm dune toes organised by site_id
df_dune_toes = (
df_profile_features.query('profile_type=="prestorm"')
.reset_index("profile_type")[z_exceedence_col]
.to_frame()
)
# Get the elevation for which we want to calculate the time the TWL exceeds this level.
# Note it's usually dune toe, but some profiles don't have a dune toe. In these cases, use dune crest value
df_temp = df_profile_features.xs("prestorm", level="profile_type").copy()
df_temp.loc[df_temp.dune_toe_z.isnull(), "dune_toe_z"] = df_temp[
df_temp.dune_toe_z.isnull()
].dune_crest_z
df_dune_toes = df_temp.dune_toe_z.to_frame()
# Merge dune toes into site_id
df_merged = df_forecasted_twl.merge(
@ -115,10 +110,10 @@ def twl_exceedence_time(
# Return the sum of hours that twl exceeded the level
return (
(df_merged[z_twl_col] >= df_merged[z_exceedence_col])
(df_merged[z_twl_col] >= df_merged["dune_toe_z"])
.groupby("site_id")
.sum()
.rename("twl_{}_exceedance_hrs".format(z_exceedence_col))
.rename("twl_{}_exceedance_hrs".format("dune_toe_z"))
.to_frame()
)
@ -141,6 +136,6 @@ def create_forecasted_impacts(profile_features_csv, forecasted_twl_csv, output_f
right_on=["site_id"],
)
df_forecasted_impacts.to_csv(output_file)
df_forecasted_impacts.to_csv(output_file, float_format="%.4f")
logger.info("Saved to %s", output_file)
logger.info("Done!")

@ -154,12 +154,16 @@ def volume_change(df_profiles, df_profile_features, zone):
if np.isnan(diff_vol):
diff_vol = 0
# Base pct change on diff volume
if diff_vol == 0:
pct_change = 0
else:
pct_change = diff_vol / prestorm_vol * 100
df_vol_changes.loc[site_id, "prestorm_{}_vol".format(zone)] = prestorm_vol
df_vol_changes.loc[site_id, "poststorm_{}_vol".format(zone)] = poststorm_vol
df_vol_changes.loc[site_id, "{}_vol_change".format(zone)] = diff_vol
df_vol_changes.loc[site_id, "{}_pct_change".format(zone)] = (
diff_vol / prestorm_vol * 100
)
df_vol_changes.loc[site_id, "{}_pct_change".format(zone)] = pct_change
return df_vol_changes
@ -229,12 +233,6 @@ def create_observed_impacts(
profiles_csv, profile_features_crest_toes_csv, raw_profile_features_csv, output_file
):
profiles_csv = "./data/interim/profiles.csv"
profile_features_crest_toes_csv = "./data/interim/profile_features_crest_toes.csv"
raw_profile_features_csv = (
"./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv"
)
logger.info("Creating observed wave impacts")
logger.info("Importing data")
df_profiles = pd.read_csv(profiles_csv, index_col=[0, 1, 2])
@ -261,6 +259,8 @@ def create_observed_impacts(
df_raw_features = pd.read_csv(raw_profile_features_csv, index_col=[0])
df_observed_impacts = overwrite_impacts(df_observed_impacts, df_raw_features)
# TODO Calculate change in slopes, shoreline and volume
# Save dataframe to csv
df_observed_impacts.to_csv(output_file, float_format="%.4f")

@ -68,7 +68,7 @@ def parse_crest_toes(df_raw_features, df_profiles):
# Try get the value from the other profile if we return nan or empty dataframe
df_z = df_profile.loc[idx[site_id, param, x_val], :]
if df_z.empty:
if np.isnan(df_z.z):
if param == "prestorm":
new_param = "poststorm"
elif param == "poststorm":
@ -77,16 +77,6 @@ def parse_crest_toes(df_raw_features, df_profiles):
else:
z_val = df_z.z
# # Try get the value from the other profile if we return nan or empty dataframe
# if df_profile.query(query).empty:
# if param == "prestorm":
# query = query.replace('prestorm', 'poststorm')
# elif param == "poststorm":
# query = query.replace('poststorm', 'prestorm')
# z_val = df_profile.query(query).iloc[0].z
# else:
# z_val = df_profile.query(query).iloc[0].z
# Put results back into merged dataframe
df_crest_toes.loc[(site_id, param), "dune_{}_z".format(loc)] = z_val
@ -467,8 +457,8 @@ def create_waves_csv(waves_mat, sites_csv, waves_output_file, sites_waves_output
df_waves.set_index(["site_id", "datetime"], inplace=True)
df_waves.sort_index(inplace=True)
df_waves, df_sites_waves = split_site_wave_params(df_waves)
df_waves.to_csv(waves_output_file)
df_sites_waves.to_csv(sites_waves_output_file)
df_waves.to_csv(waves_output_file, float_format="%.4f")
df_sites_waves.to_csv(sites_waves_output_file, float_format="%.4f")
logger.info("Created %s", waves_output_file)
logger.info("Created %s", sites_waves_output_file)
@ -523,9 +513,9 @@ def create_sites_and_profiles_csv(
df_sites.set_index(["site_id"], inplace=True)
df_sites.sort_index(inplace=True)
df_profiles.to_csv(profiles_output_file)
df_profiles.to_csv(profiles_output_file, float_format="%.8f")
logger.info("Created %s", profiles_output_file)
df_sites.to_csv(sites_output_file, float_format="%.3f")
df_sites.to_csv(sites_output_file, float_format="%.8f")
logger.info("Created %s", sites_output_file)
@ -542,7 +532,7 @@ def create_tides_csv(tides_mat, sites_csv, output_file):
df_tides = replace_unique_sites(df_tides, df_sites)
df_tides.set_index(["site_id", "datetime"], inplace=True)
df_tides.sort_index(inplace=True)
df_tides.to_csv(output_file)
df_tides.to_csv(output_file, float_format="%.4f")
logger.info("Created %s", output_file)

Loading…
Cancel
Save