diff --git a/src/cli.py b/src/cli.py index 5c048b3..210b91f 100644 --- a/src/cli.py +++ b/src/cli.py @@ -21,7 +21,8 @@ if __name__ == "__main__": cli.add_command(parse_mat.create_waves_csv) cli.add_command(parse_mat.create_sites_and_profiles_csv) cli.add_command(parse_mat.create_tides_csv) - cli.add_command(profile_features.create_profile_features) + cli.add_command(parse_mat.create_profile_features) + # cli.add_command(profile_features.create_profile_features) cli.add_command(csv_to_shp.sites_csv_to_shp) cli.add_command(forecast_twl.create_twl_forecast) cli.add_command(forecasted_storm_impacts.create_forecasted_impacts) diff --git a/src/data/parse_mat.py b/src/data/parse_mat.py index 7a368da..e7afd84 100644 --- a/src/data/parse_mat.py +++ b/src/data/parse_mat.py @@ -45,6 +45,48 @@ def parse_orientations(orientations_mat): return df +def parse_dune_crest_toes(df_sites, crest_mat, toe_mat): + """ + Parses dune crest and toe locations from .mat files into a DataFrame. + :param df_sites: sites DataFrame (indexed by site_id) containing a site_no column used to map site_no to site_id + :param crest_mat: path to the .mat file containing dune crest locations (xc1/zc1 prestorm, xc2/zc2 poststorm) + :param toe_mat: path to the .mat file containing dune toe locations (xt1/zt1 prestorm, xt2/zt2 poststorm) + :return: DataFrame of dune crest/toe x and z values, indexed by (site_id, profile_type) + """ + logger.info("Parsing dune crests and toes") + + rows = [] + crest_data = loadmat(crest_mat) + toe_data = loadmat(toe_mat) + + for n, _ in enumerate(crest_data['xc1']): + rows.extend([{ + 'dune_crest_x': crest_data['xc1'][n], + 'dune_crest_z': crest_data['zc1'][n], + 'dune_toe_x': toe_data['xt1'][n], + 'dune_toe_z': toe_data['zt1'][n], + 'profile_type': 'prestorm', + 'site_no': n+1 + },{ + 'dune_crest_x': crest_data['xc2'][n], + 'dune_crest_z': crest_data['zc2'][n], + 'dune_toe_x': toe_data['xt2'][n], + 'dune_toe_z': toe_data['zt2'][n], + 'profile_type': 'poststorm', + 'site_no': n + 1 + }]) + + df_profile_features = pd.DataFrame(rows) + + # Want the site_id instead of the site_no, so merge in df_sites + df_sites.reset_index(inplace=True) + df_profile_features = df_sites[['site_no','site_id']].merge(df_profile_features, how='outer', on=['site_no']) + df_profile_features.drop(columns=['site_no'],inplace=True) + df_profile_features.set_index(['site_id','profile_type'], inplace=True) + 
df_profile_features.sort_index(inplace=True) + df_profile_features = df_profile_features.round(3) + + return df_profile_features + def combine_sites_and_orientaions(df_sites, df_orientations): """ Replaces beach/lat/lon columns with the unique site_id. @@ -206,7 +248,7 @@ def parse_profiles_and_sites(profiles_mat): profile_rows.append( { "site_id": site_id, - "site_no": i, + "lon": lon[0], "lat": lat[0], "profile_type": profile_type, @@ -226,6 +268,7 @@ site_rows.append( { "site_id": site_id, + "site_no": i + 1, "beach": site, "lat": x_200_lat, "lon": x_200_lon, @@ -332,6 +375,18 @@ def create_waves_csv(waves_mat, sites_csv, output_file): logger.info("Created %s", output_file) +@click.command(short_help="create profile_features.csv") +@click.option("--crest-mat", required=True, help=".mat file containing dune crest locations") +@click.option("--toe-mat", required=True, help=".mat file containing dune toe locations") +@click.option("--sites-csv", required=True, help=".csv file description of cross section sites") +@click.option("--output-file", required=True, help="where to save profile_features.csv") +def create_profile_features(crest_mat, toe_mat, sites_csv, output_file): + logger.info("Creating %s", output_file) + df_sites = pd.read_csv(sites_csv, index_col=[0]) + df_profile_features = parse_dune_crest_toes(df_sites, crest_mat, toe_mat) + df_profile_features.to_csv(output_file) + logger.info("Created %s", output_file) + @click.command(short_help="create profiles.csv") @click.option("--profiles-mat", required=True, help=".mat file containing beach profiles") @click.option("--profiles-output-file", required=True, help="where to save profiles.csv")