Merge branch 'bugfix/fix-cli' into develop

develop · Chris Leaman, 6 years ago · commit c4d9e02821

@@ -48,36 +48,43 @@ impacts: ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv ./data/inte
./data/raw/processed_shorelines/orientations.mat: ./data/raw/processed_shorelines/profiles.mat
$(MATLAB_PATH) -nosplash -r "cd $(CURRENT_DIR); run('./src/data/beach_orientations.m'); quit"
# # Produces a .csv of sites where our beach cross-sections are located
# ./data/interim/sites.csv ./data/interim/profiles.csv: ./data/raw/processed_shorelines/profiles.mat
# activate ./.venv && python ./src/data/parse_mat.py create-sites-and-profiles-csv \
# --profiles-mat "./data/raw/processed_shorelines/profiles.mat" \
# --profiles-output-file "./data/interim/profiles.csv" \
# --sites-output-file "./data/interim/sites.csv"
# Produces a .csv of sites where our beach cross-sections are located
./data/interim/sites.csv ./data/interim/profiles.csv: ./data/raw/processed_shorelines/profiles.mat
activate ./.venv && python ./src/data/parse_mat.py create-sites-and-profiles-csv \
activate ./.venv && python ./src/cli.py create-sites-and-profiles-csv \
--profiles-mat "./data/raw/processed_shorelines/profiles.mat" \
--profiles-output-file "./data/interim/profiles.csv" \
--sites-output-file "./data/interim/sites.csv"
# Produces a .csv of waves for each site
./data/interim/waves.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/waves.mat
activate ./.venv && python ./src/data/parse_mat.py create-waves-csv \
activate ./.venv && python ./src/cli.py create-waves-csv \
--waves-mat "./data/raw/processed_shorelines/waves.mat" \
--sites-csv "./data/interim/sites.csv" \
--output-file "./data/interim/waves.csv"
# Produces a .csv of tides for each site
./data/interim/tides.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/tides.mat
activate ./.venv && python ./src/data/parse_mat.py create-tides-csv \
activate ./.venv && python ./src/cli.py create-tides-csv \
--tides-mat "./data/raw/processed_shorelines/tides.mat" \
--sites-csv "./data/interim/sites.csv" \
--output-file "./data/interim/tides.csv"
# Creates a .shp of our sites to load into QGis
./data/interim/sites.shp: ./data/interim/sites.csv
activate ./.venv && python ./src/data/csv_to_shp.py sites-csv-to-shp \
activate ./.venv && python ./src/cli.py sites-csv-to-shp \
--input-csv "./data/interim/sites.csv" \
--output-shp "./data/interim/sites.shp"
# Creates a .csv of our dune toe and crest profile features
./data/interim/profile_features.csv: ./data/raw/profile_features/dune_crests.shp ./data/raw/profile_features/dune_toes.shp ./data/interim/sites.csv ./data/interim/profiles.csv
activate ./.venv && python ./src/data/profile_features.py create-profile-features \
activate ./.venv && python ./src/cli.py create-profile-features \
--dune-crest-shp "./data/raw/profile_features/dune_crests.shp" \
--dune-toe-shp "./data/raw/profile_features/dune_toes.shp" \
--sites-csv "./data/interim/sites.csv" \
@@ -86,7 +93,7 @@ impacts: ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv ./data/inte
# Creates a forecast of twl using sto06 and prestorm time varying prestorm foreshore slope
./data/interim/twl_foreshore_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
activate ./.venv && python ./src/analysis/forecast_twl.py create-twl-forecast \
activate ./.venv && python ./src/cli.py create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \
--profiles-csv "./data/interim/profiles.csv" \
@@ -96,7 +103,7 @@ impacts: ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv ./data/inte
--output-file "./data/interim/twl_foreshore_slope_sto06.csv"
./data/interim/twl_mean_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
activate ./.venv && python ./src/analysis/forecast_twl.py create-twl-forecast \
activate ./.venv && python ./src/cli.py create-twl-forecast \
--waves-csv "./data/interim/waves.csv" \
--tides-csv "./data/interim/tides.csv" \
--profiles-csv "./data/interim/profiles.csv" \
@@ -106,19 +113,19 @@ impacts: ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv ./data/inte
--output-file "./data/interim/twl_mean_slope_sto06.csv"
./data/interim/impacts_observed.csv: ./data/interim/profiles.csv ./data/interim/profile_features.csv
activate ./.venv && python ./src/analysis/observed_storm_impacts.py create-observed-impacts \
activate ./.venv && python ./src/cli.py create-observed-impacts \
--profiles-csv "./data/interim/profiles.csv" \
--profile-features-csv "./data/interim/profile_features.csv" \
--output-file "./data/interim/impacts_observed.csv"
./data/interim/impacts_forecasted_mean_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_mean_slope_sto06.csv
activate ./.venv && python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
activate ./.venv && python ./src/cli.py create-forecasted-impacts \
--profile-features-csv "./data/interim/profile_features.csv" \
--forecasted-twl-csv "./data/interim/twl_mean_slope_sto06.csv" \
--output-file "./data/interim/impacts_forecasted_mean_slope_sto06.csv"
./data/interim/impacts_forecasted_foreshore_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_foreshore_slope_sto06.csv
activate ./.venv && python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
activate ./.venv && python ./src/cli.py create-forecasted-impacts \
--profile-features-csv "./data/interim/profile_features.csv" \
--forecasted-twl-csv "./data/interim/twl_foreshore_slope_sto06.csv" \
--output-file "./data/interim/impacts_forecasted_foreshore_slope_sto06.csv"

@@ -73,4 +73,5 @@ been corrected for systematic errors, so actual elevations should be taken from
- [ ] Implement dune impact calculations as per Palmsten & Holman. Calculation should be done in a new dataframe.
- [ ] Implement data/interim/*.csv file checking using py.test. Check for correct columns, number of NaNs, etc. Testing of code is probably a lower priority than just checking the interim data files at the moment.
- [ ] Investigate using [modin](https://github.com/modin-project/modin) to help speed up analysis.
- [ ] Need to think about how relative imports are handled, see [here](https://chrisyeh96.github.io/2017/08/08/definitive-guide-python-imports.html). Maybe the click CLI interface should be moved to the `./src/` folder so it can import all the other packages?
- [ ] Simplify runup_models in Stockdon06 - we should really only have one function for each runup model, and it needs to work with both individual values and entire dataframes. Use [np.maskedarray](https://docs.scipy.org/doc/numpy-1.15.0/reference/maskedarray.generic.html); see the sketch below.
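As a starting point for that last item, here is a minimal sketch of a single Stockdon et al. (2006) R2% function that accepts scalars or whole dataframe columns by working on numpy masked arrays. The name sto06_r2 and its signature are hypothetical, not this repository's API; the coefficients are the published ones.

    import numpy as np
    import numpy.ma as ma

    def sto06_r2(Hs0, Tp, beta):
        """2% exceedance runup (Stockdon et al., 2006), scalar or array input."""
        Hs0 = ma.masked_invalid(np.atleast_1d(Hs0).astype(float))
        Tp = ma.masked_invalid(np.atleast_1d(Tp).astype(float))
        beta = ma.masked_invalid(np.atleast_1d(beta).astype(float))
        Lp = 9.81 * Tp ** 2 / (2 * np.pi)   # deep-water wavelength
        zeta = beta / ma.sqrt(Hs0 / Lp)     # Iribarren number
        setup = 0.35 * beta * ma.sqrt(Hs0 * Lp)
        swash = ma.sqrt(Hs0 * Lp * (0.563 * beta ** 2 + 0.004))
        r2 = 1.1 * (setup + swash / 2)
        # Dissipative conditions (Iribarren < 0.3) use the simplified relation
        return ma.where(zeta < 0.3, 0.043 * ma.sqrt(Hs0 * Lp), r2)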

@@ -1,15 +0,0 @@
import pandas as pd
import os
def main():
data_folder = "./data/interim"
df_waves = pd.read_csv(os.path.join(data_folder, "waves.csv"), index_col=[0, 1])
df_tides = pd.read_csv(os.path.join(data_folder, "tides.csv"), index_col=[0, 1])
df_profiles = pd.read_csv(os.path.join(data_folder, "profiles.csv"), index_col=[0, 1, 2])
df_sites = pd.read_csv(os.path.join(data_folder, "sites.csv"), index_col=[0])
if __name__ == "__main__":
main()

@@ -2,13 +2,13 @@
Compares forecasted and observed impacts, putting them into one data frame and exporting the results.
"""
import logging.config
import os
import pandas as pd
logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False)
logger = logging.getLogger(__name__)
from utils import setup_logging
logger = setup_logging()
def compare_impacts(df_forecasted, df_observed):

@@ -1,16 +1,16 @@
import logging.config
import os
from multiprocessing import Pool
import click
import numpy as np
import numpy.ma as ma
import pandas as pd
from scipy import stats
import runup_models
from analysis import runup_models
from utils import setup_logging
logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False)
logger = logging.getLogger(__name__)
logger = setup_logging()
MULTIPROCESS_THREADS = int(os.environ.get("MULTIPROCESS_THREADS", 4))
@@ -301,13 +301,3 @@ def create_twl_forecast(waves_csv, tides_csv, profiles_csv, profile_features_csv
df_twl_foreshore_slope_sto06.to_csv(output_file)
logger.info("Saved to %s", output_file)
logger.info("Done!")
@click.group()
def cli():
pass
if __name__ == "__main__":
cli.add_command(create_twl_forecast)
cli()
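The MULTIPROCESS_THREADS pool size above (default 4) can be tuned per run from the environment, with no code change needed:

    MULTIPROCESS_THREADS=8 make ./data/interim/twl_mean_slope_sto06.csv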

@@ -2,13 +2,12 @@
Estimates the forecasted storm impacts based on the forecasted water level and dune crest/toe.
"""
import logging.config
import os
import click
import pandas as pd
logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False)
logger = logging.getLogger(__name__)
from utils import setup_logging
logger = setup_logging()
def forecasted_impacts(df_profile_features, df_forecasted_twl):
@@ -72,7 +71,6 @@ def storm_regime(df_forecasted_impacts):
@click.option("--forecasted-twl-csv", required=True, help="")
@click.option("--output-file", required=True, help="")
def create_forecasted_impacts(profile_features_csv, forecasted_twl_csv, output_file):
logger.info("Creating observed wave impacts")
logger.info("Importing existing data")
df_profile_features = pd.read_csv(profile_features_csv, index_col=[0])
@@ -82,13 +80,3 @@ def create_forecasted_impacts(profile_features_csv, forecasted_twl_csv, output_f
df_forecasted_impacts.to_csv(output_file)
logger.info("Saved to %s", output_file)
logger.info("Done!")
@click.group()
def cli():
pass
if __name__ == "__main__":
cli.add_command(create_forecasted_impacts)
cli()

@@ -1,12 +1,11 @@
import logging.config
import os
import click
import numpy as np
import pandas as pd
from scipy.integrate import simps
logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False)
logger = logging.getLogger(__name__)
from utils import setup_logging
logger = setup_logging()
def return_first_or_nan(l):
@@ -49,8 +48,8 @@ def volume_change(df_profiles, df_profile_features, zone):
prestorm_dune_toe_x = prestorm_dune_crest_x
# If no prestorm and poststorm profiles, skip site and continue
profile_lengths = [len(df_site.query("profile_type == '{}'".format(x))) for x in ['prestorm', 'poststorm']]
if any([length ==0 for length in profile_lengths]):
profile_lengths = [len(df_site.query("profile_type == '{}'".format(x))) for x in ["prestorm", "poststorm"]]
if any([length == 0 for length in profile_lengths]):
continue
# Find last x coordinate where we have both prestorm and poststorm measurements. If we don't do this,
@@ -182,13 +181,3 @@ def create_observed_impacts(profiles_csv, profile_features_csv, output_file):
logger.info("Saved to %s", output_file)
logger.info("Done!")
@click.group()
def cli():
pass
if __name__ == "__main__":
cli.add_command(create_observed_impacts)
cli()

@@ -0,0 +1,29 @@
"""
Entry point to run data processing and analysis commands.
"""
import click
import data.parse_mat as parse_mat
import data.profile_features as profile_features
import data.csv_to_shp as csv_to_shp
import analysis.forecast_twl as forecast_twl
import analysis.forecasted_storm_impacts as forecasted_storm_impacts
import analysis.observed_storm_impacts as observed_storm_impacts
@click.group()
def cli():
pass
if __name__ == "__main__":
cli.add_command(parse_mat.create_waves_csv)
cli.add_command(parse_mat.create_sites_and_profiles_csv)
cli.add_command(parse_mat.create_tides_csv)
cli.add_command(profile_features.create_profile_features)
cli.add_command(csv_to_shp.sites_csv_to_shp)
cli.add_command(forecast_twl.create_twl_forecast)
cli.add_command(forecasted_storm_impacts.create_forecasted_impacts)
cli.add_command(observed_storm_impacts.create_observed_impacts)
cli()
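With every command registered on a single click group, all of the Makefile recipes above funnel through one entry point, and click derives the command listing and per-command help automatically:

    python ./src/cli.py --help
    python ./src/cli.py create-waves-csv --help

One caveat of this layout: the add_command calls live inside the __main__ guard, so the commands are only attached when cli.py is run directly, not when the cli group is imported from another module.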

@@ -1,12 +1,6 @@
"""
Converts .csv files to .shape files
"""
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import click
import fiona
import pandas as pd
@@ -38,13 +32,3 @@ def sites_csv_to_shp(input_csv, output_shp):
prop = {"beach": row["beach"], "site_id": index}
output.write({"geometry": mapping(point), "properties": prop})
logger.info("Done!")
@click.group()
def cli():
pass
if __name__ == "__main__":
cli.add_command(sites_csv_to_shp)
cli()
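The fiona collection setup for sites_csv_to_shp sits outside this hunk; for a point layer carrying the two properties written above, it would look roughly like the sketch below. The schema and CRS values are assumptions (EPSG:28356 matches the convert_coord_systems default further down), not code from this commit.

    from fiona.crs import from_epsg

    schema = {
        "geometry": "Point",
        "properties": {"beach": "str", "site_id": "str"},
    }
    with fiona.open(output_shp, "w", driver="ESRI Shapefile",
                    crs=from_epsg(28356), schema=schema) as output:
        ...  # write one point per site, as shown above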

@@ -2,14 +2,8 @@
Converts raw .mat files into a flattened .csv structure which can be imported into python pandas.
"""
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from datetime import datetime, timedelta
import math
from datetime import datetime, timedelta
import click
import numpy as np
@@ -17,7 +11,7 @@ import pandas as pd
from mat4py import loadmat
from shapely.geometry import Point
from profile_features import convert_coord_systems
from data.profile_features import convert_coord_systems
from utils import setup_logging
logger = setup_logging()

@@ -1,5 +1,5 @@
import os
from functools import partial
import click
import fiona
import numpy as np
@@ -8,11 +8,10 @@ import pyproj
from shapely.geometry import LineString, Point
from shapely.geometry import shape
from shapely.ops import transform
import logging.config
from utils import setup_logging
logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False)
logger = logging.getLogger(__name__)
logger = setup_logging()
def convert_coord_systems(g1, in_coord_system="EPSG:4326", out_coord_system="EPSG:28356"):
@@ -160,13 +159,3 @@ def create_profile_features(dune_crest_shp, dune_toe_shp, sites_csv, profiles_cs
df_profile_features = parse_profile_features(df_sites, df_profiles, dune_crest_shp, dune_toe_shp)
df_profile_features.to_csv(output_csv)
logger.info("Done!")
@click.group()
def cli():
pass
if __name__ == "__main__":
cli.add_command(create_profile_features)
cli()
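The body of convert_coord_systems sits outside these hunks, but the imports kept above (functools.partial, pyproj, shapely.ops.transform) point at the standard pyproj 1.x reprojection recipe; a minimal sketch under that assumption:

    def convert_coord_systems(g1, in_coord_system="EPSG:4326", out_coord_system="EPSG:28356"):
        # By default reprojects WGS84 lat/lon to GDA94 / MGA zone 56
        project = partial(
            pyproj.transform,
            pyproj.Proj(init=in_coord_system),
            pyproj.Proj(init=out_coord_system),
        )
        return transform(project, g1)  # returns a reprojected shapely geometry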

@@ -1,32 +0,0 @@
[loggers]
keys=root, matplotlib, fiona
[handlers]
keys=consoleHandler
[formatters]
keys=simpleFormatter
[logger_root]
level=DEBUG
handlers=consoleHandler
[logger_matplotlib]
level=WARNING
handlers=consoleHandler
qualname=matplotlib
[logger_fiona]
level=WARNING
handlers=consoleHandler
qualname=fiona
[handler_consoleHandler]
class=StreamHandler
level=INFO
formatter=simpleFormatter
args=(sys.stdout,)
[formatter_simpleFormatter]
format=%(asctime)s %(name)-17s %(levelname)-8s %(message)s
datefmt=%a, %d %b %Y %H:%M:%S
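utils.setup_logging itself is not part of this diff; presumably it replaces the deleted logging.conf above with equivalent programmatic configuration. A minimal sketch that mirrors those settings (the src/utils.py location is assumed from the `from utils import setup_logging` imports):

    import logging
    import sys

    def setup_logging():
        # Console handler: INFO to stdout, same format/datefmt as logging.conf
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(logging.INFO)
        handler.setFormatter(logging.Formatter(
            fmt="%(asctime)s %(name)-17s %(levelname)-8s %(message)s",
            datefmt="%a, %d %b %Y %H:%M:%S",
        ))
        root = logging.getLogger()
        root.setLevel(logging.DEBUG)
        root.addHandler(handler)
        # Keep chatty third-party loggers at WARNING, as logging.conf did
        for noisy in ("matplotlib", "fiona"):
            logging.getLogger(noisy).setLevel(logging.WARNING)
        return logging.getLogger(__name__)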