From faa843ce2131cde8b1e32dbe25fae3df2eea82da Mon Sep 17 00:00:00 2001
From: Chris Leaman
Date: Wed, 19 Dec 2018 16:16:17 +1100
Subject: [PATCH] Fix formatting

---
 src/analysis/forecast_twl.py           | 14 +++++-----
 src/analysis/observed_storm_impacts.py | 14 ++++------
 src/data/csv_to_geojson.py             |  4 +--
 src/data/parse_mat.py                  | 38 +++++++++++++-------------
 4 files changed, 34 insertions(+), 36 deletions(-)

diff --git a/src/analysis/forecast_twl.py b/src/analysis/forecast_twl.py
index d0e389f..5298caa 100644
--- a/src/analysis/forecast_twl.py
+++ b/src/analysis/forecast_twl.py
@@ -14,6 +14,7 @@ logger = setup_logging()
 
 MULTIPROCESS_THREADS = int(os.environ.get("MULTIPROCESS_THREADS", 4))
 
+
 def forecast_twl(
     df_tides,
     df_profiles,
@@ -53,12 +54,12 @@ def forecast_twl(
 
         # When calculating mean slope, we go from the dune toe to mhw. However, in some profiles, the dune toe is not
         # defined. In these cases, we should go to the dune crest
-        df_temp['top_elevation'] = df_temp['dune_toe_z']
-        df_temp.loc[df_temp.dune_toe_z.isnull(), 'top_elevation'] = df_temp.loc[df_temp.dune_toe_z.isnull(),
-                                                                                'dune_crest_z']
-        df_temp['top_x'] = df_temp['dune_toe_x']
-        df_temp.loc[df_temp.dune_toe_x.isnull(), 'top_x'] = df_temp.loc[df_temp.dune_toe_x.isnull(),
-                                                                        'dune_crest_x']
+        df_temp["top_elevation"] = df_temp["dune_toe_z"]
+        df_temp.loc[df_temp.dune_toe_z.isnull(), "top_elevation"] = df_temp.loc[
+            df_temp.dune_toe_z.isnull(), "dune_crest_z"
+        ]
+        df_temp["top_x"] = df_temp["dune_toe_x"]
+        df_temp.loc[df_temp.dune_toe_x.isnull(), "top_x"] = df_temp.loc[df_temp.dune_toe_x.isnull(), "dune_crest_x"]
 
         with Pool(processes=n_processes) as pool:
             results = pool.starmap(
@@ -66,7 +67,6 @@ def forecast_twl(
                 [(site_id, df_temp, df_profiles, "top_elevation", "top_x", "mhw") for site_id in site_ids],
             )
 
-
     df_twl["beta"] = pd.concat(results)
 
     # Estimate runup
diff --git a/src/analysis/observed_storm_impacts.py b/src/analysis/observed_storm_impacts.py
index fd9f7b4..29a5dde 100644
--- a/src/analysis/observed_storm_impacts.py
+++ b/src/analysis/observed_storm_impacts.py
@@ -154,22 +154,20 @@ def overwrite_impacts(df_observed_impacts, df_raw_features):
     :param df_raw_profile_features:
     :return:
     """
-    df_observed_impacts.update(df_raw_features.rename(columns={
-        'observed_storm_regime':'storm_regime'}))
+    df_observed_impacts.update(df_raw_features.rename(columns={"observed_storm_regime": "storm_regime"}))
     return df_observed_impacts
 
 
-
 @click.command()
 @click.option("--profiles-csv", required=True, help="")
 @click.option("--profile-features-crest-toes-csv", required=True, help="")
-@click.option("--raw-profile-features-csv", required=True,help="")
+@click.option("--raw-profile-features-csv", required=True, help="")
 @click.option("--output-file", required=True, help="")
-def create_observed_impacts(profiles_csv, profile_features_crest_toes_csv, raw_profile_features_csv,output_file):
+def create_observed_impacts(profiles_csv, profile_features_crest_toes_csv, raw_profile_features_csv, output_file):
 
-    profiles_csv = './data/interim/profiles.csv'
-    profile_features_crest_toes_csv= './data/interim/profile_features_crest_toes.csv'
-    raw_profile_features_csv = './data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv'
+    profiles_csv = "./data/interim/profiles.csv"
+    profile_features_crest_toes_csv = "./data/interim/profile_features_crest_toes.csv"
+    raw_profile_features_csv = "./data/raw/profile_features_chris_leaman/profile_features_chris_leaman.csv"
 
     logger.info("Creating observed wave impacts")
     logger.info("Importing data")
diff --git a/src/data/csv_to_geojson.py b/src/data/csv_to_geojson.py
index 41b5c5a..301f419 100644
--- a/src/data/csv_to_geojson.py
+++ b/src/data/csv_to_geojson.py
@@ -76,8 +76,8 @@ def R_high_to_geojson(sites_csv, profiles_csv, crest_toes_csv, impacts_csv, outp
 
         # Take the intersection closest to the dune face.
         try:
-            dune_face_x = np.mean(df_crest_toes.loc[(site_id, 'prestorm'), :].x)
-            int_x = min(int_x, key=lambda x:abs(x-dune_face_x))
+            dune_face_x = np.mean(df_crest_toes.loc[(site_id, "prestorm"), :].x)
+            int_x = min(int_x, key=lambda x: abs(x - dune_face_x))
         except:
             continue
 
diff --git a/src/data/parse_mat.py b/src/data/parse_mat.py
index e56d669..10a3868 100644
--- a/src/data/parse_mat.py
+++ b/src/data/parse_mat.py
@@ -25,24 +25,25 @@ def parse_crest_toes(df_raw_features, df_profiles):
     """
 
     # Puts profiles_features_csv into format expected by rest of analysis
-    df_crest_toes = df_raw_features.reset_index().melt(id_vars=['site_id'],
-                                                       value_vars=['prestorm_dune_crest_x', 'prestorm_dune_toe_x',
-                                                                   'poststorm_dune_crest_x', 'poststorm_dune_toe_x'])
-    df_crest_toes['profile_type'] = df_crest_toes.variable.str.extract(r'(prestorm|poststorm)')
-    df_crest_toes['point_type'] = df_crest_toes.variable.str.extract(r'(dune_crest_x|dune_toe_x)')
-    df_crest_toes = df_crest_toes.drop(columns=['variable'])
-    df_crest_toes = df_crest_toes.sort_values('site_id')
-    df_crest_toes = df_crest_toes.set_index(['site_id', 'profile_type', 'point_type'])
+    df_crest_toes = df_raw_features.reset_index().melt(
+        id_vars=["site_id"],
+        value_vars=["prestorm_dune_crest_x", "prestorm_dune_toe_x", "poststorm_dune_crest_x", "poststorm_dune_toe_x"],
+    )
+    df_crest_toes["profile_type"] = df_crest_toes.variable.str.extract(r"(prestorm|poststorm)")
+    df_crest_toes["point_type"] = df_crest_toes.variable.str.extract(r"(dune_crest_x|dune_toe_x)")
+    df_crest_toes = df_crest_toes.drop(columns=["variable"])
+    df_crest_toes = df_crest_toes.sort_values("site_id")
+    df_crest_toes = df_crest_toes.set_index(["site_id", "profile_type", "point_type"])
     df_crest_toes = df_crest_toes.unstack()
     df_crest_toes.columns = df_crest_toes.columns.droplevel()
 
     # Now let's calculate the corresponding z elevations for each of our x coordinates
     for site_id in df_crest_toes.index.get_level_values("site_id").unique():
-        logger.info('Calculating dune toe/crest z elevations for {}'.format(site_id))
+        logger.info("Calculating dune toe/crest z elevations for {}".format(site_id))
 
         # Get profile for this site
         idx = pd.IndexSlice
-        df_profile = df_profiles.loc[idx[site_id, :,:], :]
+        df_profile = df_profiles.loc[idx[site_id, :, :], :]
 
         for param in ["prestorm", "poststorm"]:
             for loc in ["crest", "toe"]:
@@ -55,13 +56,13 @@ def parse_crest_toes(df_raw_features, df_profiles):
                     continue
 
                 # Try get the value from the other profile if we return nan or empty dataframe
-                df_z = df_profile.loc[idx[site_id, param, x_val],:]
+                df_z = df_profile.loc[idx[site_id, param, x_val], :]
                 if df_z.empty:
                     if param == "prestorm":
-                        new_param = 'poststorm'
+                        new_param = "poststorm"
                     elif param == "poststorm":
-                        new_param = 'prestorm'
-                    z_val = df_profile.loc[idx[site_id, new_param, x_val],:].z
+                        new_param = "prestorm"
+                    z_val = df_profile.loc[idx[site_id, new_param, x_val], :].z
                 else:
                     z_val = df_z.z
 
@@ -80,6 +81,7 @@ def parse_crest_toes(df_raw_features, df_profiles):
 
     return df_crest_toes
 
+
 def parse_dune_crest_toes(df_sites, crest_mat, toe_mat):
     """
     :param df_sites:
@@ -128,7 +130,6 @@ def parse_dune_crest_toes(df_sites, crest_mat, toe_mat):
     return df_profile_features
 
 
-
 def parse_waves(waves_mat):
     """
     Parses the raw waves.mat file and returns a pandas dataframe
@@ -426,14 +427,13 @@ def create_crest_toes(profile_features_csv, profiles_csv, output_file):
     logger.info("Creating %s", output_file)
     df_raw_features = pd.read_csv(profile_features_csv, index_col=[0])
-    df_profiles = pd.read_csv(profiles_csv, index_col=[0,1,2])
+    df_profiles = pd.read_csv(profiles_csv, index_col=[0, 1, 2])
 
     df_crest_toes = parse_crest_toes(df_raw_features, df_profiles)
 
-    df_crest_toes.to_csv(output_file,float_format="%.3f")
+    df_crest_toes.to_csv(output_file, float_format="%.3f")
     logger.info("Created %s", output_file)
 
 
-
 @click.command(short_help="create profiles.csv")
 @click.option("--profiles-mat", required=True, help=".mat file containing beach profiles")
 @click.option("--profiles-output-file", required=True, help="where to save profiles.csv")
@@ -450,7 +450,7 @@ def create_sites_and_profiles_csv(profiles_mat, profiles_output_file, sites_outp
 
     df_profiles.to_csv(profiles_output_file)
     logger.info("Created %s", profiles_output_file)
 
-    df_sites.to_csv(sites_output_file,float_format="%.3f")
+    df_sites.to_csv(sites_output_file, float_format="%.3f")
     logger.info("Created %s", sites_output_file)
 
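Note (not part of the patch): the largest block reformatted above is the melt/extract/unstack chain in parse_crest_toes(). The following standalone sketch shows what that chain does to the raw crest/toe features; the site id and x values are made up for illustration, and the snippet is independent of the repository code:

    import pandas as pd

    # One row per site, wide columns for pre/post-storm crest and toe x coordinates (made-up values).
    df_raw_features = pd.DataFrame(
        {
            "site_id": ["SITE0001"],
            "prestorm_dune_crest_x": [180.0],
            "prestorm_dune_toe_x": [200.0],
            "poststorm_dune_crest_x": [182.0],
            "poststorm_dune_toe_x": [205.0],
        }
    ).set_index("site_id")

    # Same chain as the reformatted block: melt to long format, split the variable name into
    # profile_type and point_type, then pivot point_type back out into columns.
    df = df_raw_features.reset_index().melt(
        id_vars=["site_id"],
        value_vars=["prestorm_dune_crest_x", "prestorm_dune_toe_x", "poststorm_dune_crest_x", "poststorm_dune_toe_x"],
    )
    df["profile_type"] = df.variable.str.extract(r"(prestorm|poststorm)")
    df["point_type"] = df.variable.str.extract(r"(dune_crest_x|dune_toe_x)")
    df = df.drop(columns=["variable"]).sort_values("site_id")
    df = df.set_index(["site_id", "profile_type", "point_type"]).unstack()
    df.columns = df.columns.droplevel()

    # df now has one row per (site_id, profile_type) and columns dune_crest_x / dune_toe_x,
    # which is the shape the rest of the analysis expects.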