Merge branch 'bugfix/change-profiles-mat' into develop

develop
Chris Leaman 6 years ago
commit 8033df931c

@@ -10,11 +10,9 @@ MATLAB_PATH="C:/Program Files/MATLAB/R2016b/bin/win64/MATLAB.exe"
 # total water level.
 MULTIPROCESS_THREADS=2
 # The settings below should be left as is unless you know what you're doing.
-# We want to create the pipenv virtualenv in the current folder
-PIPENV_VENV_IN_PROJECT=1
 # Need to set pythonpath so that relative imports can be properly used in with pipenv
 # Refer to https://stackoverflow.com/q/52986500 and https://stackoverflow.com/a/49797761
-PYTHONPATH=${PWD}
+# PYTHONPATH=${PWD}

.gitignore

@@ -18,3 +18,4 @@ __pycache__/
 *.py[cod]
 *$py.class
 /.venv/
+*.log

Makefile

@@ -1,3 +1,5 @@
+SHELL=cmd
 ###############################
 # Load environment variables
@@ -12,13 +14,21 @@ CURRENT_DIR = $(shell pwd)
 .PHONY: venv_init
 venv-init: ##@environment Setup virtual environment
-pip install pipenv
-pipenv --python 3.7
-pipenv install
+conda create -f environment.yml --prefix=.venv python=3.7
+venv-activate: ##@environment Activates the virtual environment
+activate $(CURRENT_DIR)/.venv
+venv-requirements-install: ##@environment Ensures environment.yml packages are installed
+conda env update
+venv-requirements-export: ##@environment Exports current environment to environment.yml
+conda env export --file environment.yml
 ###############################
 # Get data from network drive
+.PHONY: push-data pull-data
 push-data: ##@data Copies data from ./data/ to data backup directory
 rclone copy ./data/ $(DATA_BACKUP_DIR) --exclude "*.las" --progress
@@ -32,51 +42,42 @@ pull-data: ##@data Copies data from data backup directory to ./data/
 # Process data
 .PHONY: process-mat
-process-mat: ./data/interim/sites.csv ./data/interim/waves.csv ./data/interim/profiles.csv ./data/interim/tides.csv ##@data Process all .mat to .csv
+impacts: ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv ./data/interim/impacts_forecasted_mean_slope_sto06.csv ./data/interim/impacts_observed.csv ##@products makes observed and forecasted impacts
 # Calculates beach orientations at each profile
 ./data/raw/processed_shorelines/orientations.mat: ./data/raw/processed_shorelines/profiles.mat
 $(MATLAB_PATH) -nosplash -r "cd $(CURRENT_DIR); run('./src/data/beach_orientations.m'); quit"
 # Produces a .csv of sites where our beach cross-sections are located
-./data/interim/sites.csv: ./data/raw/processed_shorelines/*.mat
-pipenv run python ./src/data/parse_mat.py create-sites-csv \
---waves-mat "./data/raw/processed_shorelines/waves.mat" \
---tides-mat "./data/raw/processed_shorelines/tides.mat" \
+./data/interim/sites.csv ./data/interim/profiles.csv: ./data/raw/processed_shorelines/profiles.mat
+activate ./.venv && python ./src/data/parse_mat.py create-sites-and-profiles-csv \
 --profiles-mat "./data/raw/processed_shorelines/profiles.mat" \
---orientations-mat "./data/raw/processed_shorelines/orientations.mat" \
---output-file "./data/interim/sites.csv"
+--profiles-output-file "./data/interim/profiles.csv" \
+--sites-output-file "./data/interim/sites.csv"
 # Produces a .csv of waves for each site
 ./data/interim/waves.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/waves.mat
-pipenv run python ./src/data/parse_mat.py create-waves-csv \
+activate ./.venv && python ./src/data/parse_mat.py create-waves-csv \
 --waves-mat "./data/raw/processed_shorelines/waves.mat" \
 --sites-csv "./data/interim/sites.csv" \
 --output-file "./data/interim/waves.csv"
-# Produces a .csv of profiles for each site
-./data/interim/profiles.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/profiles.mat
-pipenv run python ./src/data/parse_mat.py create-profiles-csv \
---profiles-mat "./data/raw/processed_shorelines/profiles.mat" \
---sites-csv "./data/interim/sites.csv" \
---output-file "./data/interim/profiles.csv"
 # Produces a .csv of tides for each site
 ./data/interim/tides.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/tides.mat
-pipenv run python ./src/data/parse_mat.py create-tides-csv \
+activate ./.venv && python ./src/data/parse_mat.py create-tides-csv \
 --tides-mat "./data/raw/processed_shorelines/tides.mat" \
 --sites-csv "./data/interim/sites.csv" \
 --output-file "./data/interim/tides.csv"
 # Creates a .shp of our sites to load into QGis
 ./data/interim/sites.shp: ./data/interim/sites.csv
-pipenv run python ./src/data/csv_to_shp.py sites-csv-to-shp \
+activate ./.venv && python ./src/data/csv_to_shp.py sites-csv-to-shp \
 --input-csv "./data/interim/sites.csv" \
 --output-shp "./data/interim/sites.shp"
 # Creates a .csv of our dune toe and crest profile features
 ./data/interim/profile_features.csv: ./data/raw/profile_features/dune_crests.shp ./data/raw/profile_features/dune_toes.shp ./data/interim/sites.csv ./data/interim/profiles.csv
-pipenv run python ./src/data/profile_features.py create-profile-features \
+activate ./.venv && python ./src/data/profile_features.py create-profile-features \
 --dune-crest-shp "./data/raw/profile_features/dune_crests.shp" \
 --dune-toe-shp "./data/raw/profile_features/dune_toes.shp" \
 --sites-csv "./data/interim/sites.csv" \
@@ -85,7 +86,7 @@ process-mat: ./data/interim/sites.csv ./data/interim/waves.csv ./data/interim/pr
 # Creates a forecast of twl using sto06 and prestorm time varying prestorm foreshore slope
 ./data/interim/twl_foreshore_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
-pipenv run python ./src/analysis/forecast_twl.py create-twl-forecast \
+activate ./.venv && python ./src/analysis/forecast_twl.py create-twl-forecast \
 --waves-csv "./data/interim/waves.csv" \
 --tides-csv "./data/interim/tides.csv" \
 --profiles-csv "./data/interim/profiles.csv" \
@@ -94,9 +95,8 @@ process-mat: ./data/interim/sites.csv ./data/interim/waves.csv ./data/interim/pr
 --slope "foreshore" \
 --output-file "./data/interim/twl_foreshore_slope_sto06.csv"
-# Creates a forecast of twl using sto06 and prestorm mean foreshore slope
 ./data/interim/twl_mean_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
-pipenv run python ./src/analysis/forecast_twl.py create-twl-forecast \
+activate ./.venv && python ./src/analysis/forecast_twl.py create-twl-forecast \
 --waves-csv "./data/interim/waves.csv" \
 --tides-csv "./data/interim/tides.csv" \
 --profiles-csv "./data/interim/profiles.csv" \
@@ -106,41 +106,28 @@ process-mat: ./data/interim/sites.csv ./data/interim/waves.csv ./data/interim/pr
 --output-file "./data/interim/twl_mean_slope_sto06.csv"
 ./data/interim/impacts_observed.csv: ./data/interim/profiles.csv ./data/interim/profile_features.csv
-pipenv run python ./src/analysis/observed_storm_impacts.py create-observed-impacts \
+activate ./.venv && python ./src/analysis/observed_storm_impacts.py create-observed-impacts \
 --profiles-csv "./data/interim/profiles.csv" \
 --profile-features-csv "./data/interim/profile_features.csv" \
 --output-file "./data/interim/impacts_observed.csv"
 ./data/interim/impacts_forecasted_mean_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_mean_slope_sto06.csv
-pipenv run python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
+activate ./.venv && python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
 --profile-features-csv "./data/interim/profile_features.csv" \
 --forecasted-twl-csv "./data/interim/twl_mean_slope_sto06.csv" \
 --output-file "./data/interim/impacts_forecasted_mean_slope_sto06.csv"
 ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_foreshore_slope_sto06.csv
-pipenv run python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
+activate ./.venv && python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
 --profile-features-csv "./data/interim/profile_features.csv" \
 --forecasted-twl-csv "./data/interim/twl_foreshore_slope_sto06.csv" \
 --output-file "./data/interim/impacts_forecasted_foreshore_slope_sto06.csv"
-#################################################################################
-# PROJECT RULES #
-#################################################################################
-.PHONY: push-data parse_mat sites-csv-to-shp
-mat-to-csv: ##@data Converts raw .mat files to .csv for python
-cd ./src/data/ && python parse_mat.py
-sites-csv-to-shp: ##@data Create the sites.shp from sites.csv
-cd ./src/data && python csv_to_shp.py sites_csv_to_shp "..\..\data\interim\sites.csv" "..\..\data\interim\sites.shp"
 ###############################
 # Misc commands
 format: ./src/*.py ##@misc Check python file formatting
-pipenv run black --line-length 120 "src/"
+activate ./.venv && black --line-length 120 "src/"
 ###############################

Pipfile

@@ -1,31 +0,0 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
[packages]
numpy = "*"
scipy = "*"
pandas = "*"
matplotlib = "*"
click = "*"
mat4py = "*"
black = "*"
shapely = "*"
fiona = {file = "https://download.lfd.uci.edu/pythonlibs/h2ufg7oq/Fiona-1.8.2-cp37-cp37m-win_amd64.whl"}
gdal = {file = "https://download.lfd.uci.edu/pythonlibs/h2ufg7oq/GDAL-2.3.2-cp37-cp37m-win_amd64.whl"}
pyproj = {file = "https://download.lfd.uci.edu/pythonlibs/h2ufg7oq/pyproj-1.9.5.1-cp37-cp37m-win_amd64.whl"}
colorlover = "*"
ipykernel = "*"
jupyter = "*"
plotly = "*"
jupyter-contrib-nbextensions = "*"
jupyter-nbextensions-configurator = "*"
[requires]
python_version = "3.7"
[pipenv]
allow_prereleases = true

Pipfile.lock

@@ -1,753 +0,0 @@
{
"_meta": {
"hash": {
"sha256": "d1086c407632a2c2f9a8e1b657d7e0703ac8929bb3aa41f68ea364448c47cb33"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.7"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"appdirs": {
"hashes": [
"sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92",
"sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"
],
"version": "==1.4.3"
},
"attrs": {
"hashes": [
"sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69",
"sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"
],
"version": "==18.2.0"
},
"backcall": {
"hashes": [
"sha256:38ecd85be2c1e78f77fd91700c76e14667dc21e2713b63876c0eb901196e01e4",
"sha256:bbbf4b1e5cd2bdb08f915895b51081c041bac22394fdfcfdfbe9f14b77c08bf2"
],
"version": "==0.1.0"
},
"black": {
"hashes": [
"sha256:817243426042db1d36617910df579a54f1afd659adb96fc5032fcf4b36209739",
"sha256:e030a9a28f542debc08acceb273f228ac422798e5215ba2a791a6ddeaaca22a5"
],
"index": "pypi",
"version": "==18.9b0"
},
"bleach": {
"hashes": [
"sha256:48d39675b80a75f6d1c3bdbffec791cf0bbbab665cf01e20da701c77de278718",
"sha256:73d26f018af5d5adcdabf5c1c974add4361a9c76af215fe32fdec8a6fc5fb9b9"
],
"version": "==3.0.2"
},
"certifi": {
"hashes": [
"sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c",
"sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a"
],
"version": "==2018.10.15"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"version": "==3.0.4"
},
"click": {
"hashes": [
"sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13",
"sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
],
"index": "pypi",
"version": "==7.0"
},
"click-plugins": {
"hashes": [
"sha256:b1ee1ccc9421c73007fe290680d97984eb6eaf5f4512b7620c6aa46031d6cb6b",
"sha256:dfed74b5063546a137de99baaaf742b4de4337ad2b3e1df5ec7c8a256adc0847"
],
"version": "==1.0.4"
},
"cligj": {
"hashes": [
"sha256:20f24ce9abfde3f758aec3399e6811b936b6772f360846c662c19bf5537b4f14",
"sha256:60c93dda4499562eb87509a8ff3535a7441053b766c9c26bcf874a732f939c7c",
"sha256:6c7d52d529a78712491974f975c33473f430c0f7beb18c0d7a402a743dcb460a"
],
"version": "==0.5.0"
},
"colorama": {
"hashes": [
"sha256:a3d89af5db9e9806a779a50296b5fdb466e281147c2c235e8225ecc6dbf7bbf3",
"sha256:c9b54bebe91a6a803e0772c8561d53f2926bfeb17cd141fbabcb08424086595c"
],
"markers": "sys_platform == 'win32'",
"version": "==0.4.0"
},
"colorlover": {
"hashes": [
"sha256:f12a091ca2bca29e0e9294a072693bc70d2fafc573bb7c0fc8070099b5de9cb2"
],
"index": "pypi",
"version": "==0.2.1"
},
"cycler": {
"hashes": [
"sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d",
"sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"
],
"version": "==0.10.0"
},
"decorator": {
"hashes": [
"sha256:2c51dff8ef3c447388fe5e4453d24a2bf128d3a4c32af3fabef1f01c6851ab82",
"sha256:c39efa13fbdeb4506c476c9b3babf6a718da943dab7811c206005a4a956c080c"
],
"version": "==4.3.0"
},
"defusedxml": {
"hashes": [
"sha256:24d7f2f94f7f3cb6061acb215685e5125fbcdc40a857eff9de22518820b0a4f4",
"sha256:702a91ade2968a82beb0db1e0766a6a273f33d4616a6ce8cde475d8e09853b20"
],
"version": "==0.5.0"
},
"entrypoints": {
"hashes": [
"sha256:10ad569bb245e7e2ba425285b9fa3e8178a0dc92fc53b1e1c553805e15a8825b",
"sha256:d2d587dde06f99545fb13a383d2cd336a8ff1f359c5839ce3a64c917d10c029f"
],
"version": "==0.2.3"
},
"fiona": {
"file": "https://download.lfd.uci.edu/pythonlibs/h2ufg7oq/Fiona-1.8.2-cp37-cp37m-win_amd64.whl",
"hashes": [
"sha256:ea6f9f7914fe25e7e9945cabf628edb94c483702f8181d9b868b6bfffec2db25"
],
"index": "pypi",
"version": "==1.8.2"
},
"gdal": {
"file": "https://download.lfd.uci.edu/pythonlibs/h2ufg7oq/GDAL-2.3.2-cp37-cp37m-win_amd64.whl",
"hashes": [
"sha256:2f6c36ee59f9b24fb16514e4fce8b73e7833714feb9b8397f91662256e1b12d8"
],
"index": "pypi",
"version": "==2.3.2"
},
"idna": {
"hashes": [
"sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e",
"sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16"
],
"version": "==2.7"
},
"ipykernel": {
"hashes": [
"sha256:0aeb7ec277ac42cc2b59ae3d08b10909b2ec161dc6908096210527162b53675d",
"sha256:0fc0bf97920d454102168ec2008620066878848fcfca06c22b669696212e292f"
],
"index": "pypi",
"version": "==5.1.0"
},
"ipython": {
"hashes": [
"sha256:a5781d6934a3341a1f9acb4ea5acdc7ea0a0855e689dbe755d070ca51e995435",
"sha256:b10a7ddd03657c761fc503495bc36471c8158e3fc948573fb9fe82a7029d8efd"
],
"markers": "python_version >= '3.3'",
"version": "==7.1.1"
},
"ipython-genutils": {
"hashes": [
"sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8",
"sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"
],
"version": "==0.2.0"
},
"ipywidgets": {
"hashes": [
"sha256:0f2b5cde9f272cb49d52f3f0889fdd1a7ae1e74f37b48dac35a83152780d2b7b",
"sha256:a3e224f430163f767047ab9a042fc55adbcab0c24bbe6cf9f306c4f89fdf0ba3"
],
"version": "==7.4.2"
},
"jedi": {
"hashes": [
"sha256:0191c447165f798e6a730285f2eee783fff81b0d3df261945ecb80983b5c3ca7",
"sha256:b7493f73a2febe0dc33d51c99b474547f7f6c0b2c8fb2b21f453eef204c12148"
],
"version": "==0.13.1"
},
"jinja2": {
"hashes": [
"sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd",
"sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"
],
"version": "==2.10"
},
"jsonschema": {
"hashes": [
"sha256:3ae8afd6f4ca6417f14bf43ef61341311598f14234cdb4174fe43d42b236a3c8",
"sha256:dfd8426040892c8d0ef6da574085f282569f189cb24b70091a66c21c12d6705e"
],
"version": "==3.0.0a3"
},
"jupyter": {
"hashes": [
"sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7",
"sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78",
"sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"
],
"index": "pypi",
"version": "==1.0.0"
},
"jupyter-client": {
"hashes": [
"sha256:27befcf0446b01e29853014d6a902dd101ad7d7f94e2252b1adca17c3466b761",
"sha256:59e6d791e22a8002ad0e80b78c6fd6deecab4f9e1b1aa1a22f4213de271b29ea"
],
"version": "==5.2.3"
},
"jupyter-console": {
"hashes": [
"sha256:308ce876354924fb6c540b41d5d6d08acfc946984bf0c97777c1ddcb42e0b2f5",
"sha256:cc80a97a5c389cbd30252ffb5ce7cefd4b66bde98219edd16bf5cb6f84bb3568"
],
"version": "==6.0.0"
},
"jupyter-contrib-core": {
"hashes": [
"sha256:1ec81e275a8f5858d56b0c4c6cd85335aa8e915001b8657fe51c620c3cdde50f",
"sha256:e65bc0e932ff31801003cef160a4665f2812efe26a53801925a634735e9a5794"
],
"version": "==0.3.3"
},
"jupyter-contrib-nbextensions": {
"hashes": [
"sha256:40eba9492d22302599d0a8f29d1297efb06e233677fe2d6f4d224e7c3e373872",
"sha256:f4893d99fed6be6587cd2c722ef8841556283a697a482288b621b514beda2405"
],
"index": "pypi",
"version": "==0.5.0"
},
"jupyter-core": {
"hashes": [
"sha256:927d713ffa616ea11972534411544589976b2493fc7e09ad946e010aa7eb9970",
"sha256:ba70754aa680300306c699790128f6fbd8c306ee5927976cbe48adacf240c0b7"
],
"version": "==4.4.0"
},
"jupyter-highlight-selected-word": {
"hashes": [
"sha256:9545dfa9cb057eebe3a5795604dcd3a5294ea18637e553f61a0b67c1b5903c58",
"sha256:9fa740424859a807950ca08d2bfd28a35154cd32dd6d50ac4e0950022adc0e7b"
],
"version": "==0.2.0"
},
"jupyter-latex-envs": {
"hashes": [
"sha256:b0a83e0cda2d33e61c4b2da94365d2de4dfcdc1ed67abdba3cbe390872cf5231"
],
"version": "==1.4.4"
},
"jupyter-nbextensions-configurator": {
"hashes": [
"sha256:778f36a0996e622c224589327405583f4a539d183fa2fdaeecbc4397c1af9991",
"sha256:e2cea15dde24c9090104cf3ebedd6bcd354004cbea5e858b3776372ad50b7d46"
],
"index": "pypi",
"version": "==0.4.0"
},
"kiwisolver": {
"hashes": [
"sha256:0ee4ed8b3ae8f5f712b0aa9ebd2858b5b232f1b9a96b0943dceb34df2a223bc3",
"sha256:0f7f532f3c94e99545a29f4c3f05637f4d2713e7fd91b4dd8abfc18340b86cd5",
"sha256:1a078f5dd7e99317098f0e0d490257fd0349d79363e8c923d5bb76428f318421",
"sha256:1aa0b55a0eb1bd3fa82e704f44fb8f16e26702af1a073cc5030eea399e617b56",
"sha256:2874060b91e131ceeff00574b7c2140749c9355817a4ed498e82a4ffa308ecbc",
"sha256:379d97783ba8d2934d52221c833407f20ca287b36d949b4bba6c75274bcf6363",
"sha256:3b791ddf2aefc56382aadc26ea5b352e86a2921e4e85c31c1f770f527eb06ce4",
"sha256:4329008a167fac233e398e8a600d1b91539dc33c5a3eadee84c0d4b04d4494fa",
"sha256:45813e0873bbb679334a161b28cb9606d9665e70561fd6caa8863e279b5e464b",
"sha256:53a5b27e6b5717bdc0125338a822605084054c80f382051fb945d2c0e6899a20",
"sha256:574f24b9805cb1c72d02b9f7749aa0cc0b81aa82571be5201aa1453190390ae5",
"sha256:66f82819ff47fa67a11540da96966fb9245504b7f496034f534b81cacf333861",
"sha256:79e5fe3ccd5144ae80777e12973027bd2f4f5e3ae8eb286cabe787bed9780138",
"sha256:83410258eb886f3456714eea4d4304db3a1fc8624623fc3f38a487ab36c0f653",
"sha256:8b6a7b596ce1d2a6d93c3562f1178ebd3b7bb445b3b0dd33b09f9255e312a965",
"sha256:9576cb63897fbfa69df60f994082c3f4b8e6adb49cccb60efb2a80a208e6f996",
"sha256:95a25d9f3449046ecbe9065be8f8380c03c56081bc5d41fe0fb964aaa30b2195",
"sha256:a424f048bebc4476620e77f3e4d1f282920cef9bc376ba16d0b8fe97eec87cde",
"sha256:aaec1cfd94f4f3e9a25e144d5b0ed1eb8a9596ec36d7318a504d813412563a85",
"sha256:acb673eecbae089ea3be3dcf75bfe45fc8d4dcdc951e27d8691887963cf421c7",
"sha256:b15bc8d2c2848a4a7c04f76c9b3dc3561e95d4dabc6b4f24bfabe5fd81a0b14f",
"sha256:b1c240d565e977d80c0083404c01e4d59c5772c977fae2c483f100567f50847b",
"sha256:c595693de998461bcd49b8d20568c8870b3209b8ea323b2a7b0ea86d85864694",
"sha256:ce3be5d520b4d2c3e5eeb4cd2ef62b9b9ab8ac6b6fedbaa0e39cdb6f50644278",
"sha256:e0f910f84b35c36a3513b96d816e6442ae138862257ae18a0019d2fc67b041dc",
"sha256:ea36e19ac0a483eea239320aef0bd40702404ff8c7e42179a2d9d36c5afcb55c",
"sha256:efabbcd4f406b532206b8801058c8bab9e79645b9880329253ae3322b7b02cd5",
"sha256:f923406e6b32c86309261b8195e24e18b6a8801df0cfc7814ac44017bfcb3939"
],
"version": "==1.0.1"
},
"lxml": {
"hashes": [
"sha256:02bc220d61f46e9b9d5a53c361ef95e9f5e1d27171cd461dddb17677ae2289a5",
"sha256:22f253b542a342755f6cfc047fe4d3a296515cf9b542bc6e261af45a80b8caf6",
"sha256:2f31145c7ff665b330919bfa44aacd3a0211a76ca7e7b441039d2a0b0451e415",
"sha256:36720698c29e7a9626a0dc802ef8885f8f0239bfd1689628ecd459a061f2807f",
"sha256:438a1b0203545521f6616132bfe0f4bca86f8a401364008b30e2b26ec408ce85",
"sha256:4815892904c336bbaf73dafd54f45f69f4021c22b5bad7332176bbf4fb830568",
"sha256:5be031b0f15ad63910d8e5038b489d95a79929513b3634ad4babf77100602588",
"sha256:5c93ae37c3c588e829b037fdfbd64a6e40c901d3f93f7beed6d724c44829a3ad",
"sha256:60842230678674cdac4a1cf0f707ef12d75b9a4fc4a565add4f710b5fcf185d5",
"sha256:62939a8bb6758d1bf923aa1c13f0bcfa9bf5b2fc0f5fa917a6e25db5fe0cfa4e",
"sha256:75830c06a62fe7b8fe3bbb5f269f0b308f19f3949ac81cfd40062f47c1455faf",
"sha256:81992565b74332c7c1aff6a913a3e906771aa81c9d0c68c68113cffcae45bc53",
"sha256:8c892fb0ee52c594d9a7751c7d7356056a9682674b92cc1c4dc968ff0f30c52f",
"sha256:9d862e3cf4fc1f2837dedce9c42269c8c76d027e49820a548ac89fdcee1e361f",
"sha256:a623965c086a6e91bb703d4da62dabe59fe88888e82c4117d544e11fd74835d6",
"sha256:a7783ab7f6a508b0510490cef9f857b763d796ba7476d9703f89722928d1e113",
"sha256:aab09fbe8abfa3b9ce62aaf45aca2d28726b1b9ee44871dbe644050a2fff4940",
"sha256:abf181934ac3ef193832fb973fd7f6149b5c531903c2ec0f1220941d73eee601",
"sha256:ae07fa0c115733fce1e9da96a3ac3fa24801742ca17e917e0c79d63a01eeb843",
"sha256:b9c78242219f674ab645ec571c9a95d70f381319a23911941cd2358a8e0521cf",
"sha256:bccb267678b870d9782c3b44d0cefe3ba0e329f9af8c946d32bf3778e7a4f271",
"sha256:c4df4d27f4c93b2cef74579f00b1d3a31a929c7d8023f870c4b476f03a274db4",
"sha256:caf0e50b546bb60dfa99bb18dfa6748458a83131ecdceaf5c071d74907e7e78a",
"sha256:d3266bd3ac59ac4edcd5fa75165dee80b94a3e5c91049df5f7c057ccf097551c",
"sha256:db0d213987bcd4e6d41710fb4532b22315b0d8fb439ff901782234456556aed1",
"sha256:dbbd5cf7690a40a9f0a9325ab480d0fccf46d16b378eefc08e195d84299bfae1",
"sha256:e16e07a0ec3a75b5ee61f2b1003c35696738f937dc8148fbda9fe2147ccb6e61",
"sha256:e175a006725c7faadbe69e791877d09936c0ef2cf49d01b60a6c1efcb0e8be6f",
"sha256:edd9c13a97f6550f9da2236126bb51c092b3b1ce6187f2bd966533ad794bbb5e",
"sha256:fa39ea60d527fbdd94215b5e5552f1c6a912624521093f1384a491a8ad89ad8b"
],
"version": "==4.2.5"
},
"markupsafe": {
"hashes": [
"sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1"
],
"version": "==1.1.0"
},
"mat4py": {
"hashes": [
"sha256:8272ce80747120ff44200b1fde341c657595813e1adf61262e44b52642c10dbe"
],
"index": "pypi",
"version": "==0.4.1"
},
"matplotlib": {
"hashes": [
"sha256:16aa61846efddf91df623bbb4598e63be1068a6b6a2e6361cc802b41c7a286eb",
"sha256:1975b71a33ac986bb39b6d5cfbc15c7b1f218f1134efb4eb3881839d6ae69984",
"sha256:2b222744bd54781e6cc0b717fa35a54e5f176ba2ced337f27c5b435b334ef854",
"sha256:317643c0e88fad55414347216362b2e229c130edd5655fea5f8159a803098468",
"sha256:4269ce3d1b897d46fc3cc2273a0cc2a730345bb47e4456af662e6fca85c89dd7",
"sha256:65214fd668975077cdf8d408ccf2b2d6bdf73b4e6895a79f8e99ce4f0b43fcdb",
"sha256:74bc213ab8a92d86a0b304d9359d1e1d14168d4c6121b83862c9d8a88b89a738",
"sha256:88949be0db54755995dfb0210d0099a8712a3c696c860441971354c3debfc4af",
"sha256:8e1223d868be89423ec95ada5f37aa408ee64fe76ccb8e4d5f533699ba4c0e4a",
"sha256:9fa00f2d7a552a95fa6016e498fdeb6d74df537853dda79a9055c53dfc8b6e1a",
"sha256:c27fd46cab905097ba4bc28d5ba5289930f313fb1970c9d41092c9975b80e9b4",
"sha256:c94b792af431f6adb6859eb218137acd9a35f4f7442cea57e4a59c54751c36af",
"sha256:f4c12a01eb2dc16693887a874ba948b18c92f425c4d329639ece6d3bb8e631bb"
],
"index": "pypi",
"version": "==3.0.2"
},
"mistune": {
"hashes": [
"sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e",
"sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"
],
"version": "==0.8.4"
},
"munch": {
"hashes": [
"sha256:6ae3d26b837feacf732fb8aa5b842130da1daf221f5af9f9d4b2a0a6414b0d51"
],
"version": "==2.3.2"
},
"nbconvert": {
"hashes": [
"sha256:08d21cf4203fabafd0d09bbd63f06131b411db8ebeede34b0fd4be4548351779",
"sha256:a8a2749f972592aa9250db975304af6b7337f32337e523a2c995cc9e12c07807"
],
"version": "==5.4.0"
},
"nbformat": {
"hashes": [
"sha256:b9a0dbdbd45bb034f4f8893cafd6f652ea08c8c1674ba83f2dc55d3955743b0b",
"sha256:f7494ef0df60766b7cabe0a3651556345a963b74dbc16bc7c18479041170d402"
],
"version": "==4.4.0"
},
"notebook": {
"hashes": [
"sha256:661341909008d1e7bfa1541904006f9789fa3de1cbec8379d2879819454cc04b",
"sha256:91705b109fc785198faed892489cddb233265564d5e2dad5e4f7974af05ee8dd"
],
"version": "==5.7.2"
},
"numpy": {
"hashes": [
"sha256:0df89ca13c25eaa1621a3f09af4c8ba20da849692dcae184cb55e80952c453fb",
"sha256:154c35f195fd3e1fad2569930ca51907057ae35e03938f89a8aedae91dd1b7c7",
"sha256:18e84323cdb8de3325e741a7a8dd4a82db74fde363dce32b625324c7b32aa6d7",
"sha256:1e8956c37fc138d65ded2d96ab3949bd49038cc6e8a4494b1515b0ba88c91565",
"sha256:23557bdbca3ccbde3abaa12a6e82299bc92d2b9139011f8c16ca1bb8c75d1e95",
"sha256:24fd645a5e5d224aa6e39d93e4a722fafa9160154f296fd5ef9580191c755053",
"sha256:36e36b6868e4440760d4b9b44587ea1dc1f06532858d10abba98e851e154ca70",
"sha256:3d734559db35aa3697dadcea492a423118c5c55d176da2f3be9c98d4803fc2a7",
"sha256:416a2070acf3a2b5d586f9a6507bb97e33574df5bd7508ea970bbf4fc563fa52",
"sha256:4a22dc3f5221a644dfe4a63bf990052cc674ef12a157b1056969079985c92816",
"sha256:4d8d3e5aa6087490912c14a3c10fbdd380b40b421c13920ff468163bc50e016f",
"sha256:4f41fd159fba1245e1958a99d349df49c616b133636e0cf668f169bce2aeac2d",
"sha256:561ef098c50f91fbac2cc9305b68c915e9eb915a74d9038ecf8af274d748f76f",
"sha256:56994e14b386b5c0a9b875a76d22d707b315fa037affc7819cda08b6d0489756",
"sha256:73a1f2a529604c50c262179fcca59c87a05ff4614fe8a15c186934d84d09d9a5",
"sha256:7da99445fd890206bfcc7419f79871ba8e73d9d9e6b82fe09980bc5bb4efc35f",
"sha256:99d59e0bcadac4aa3280616591fb7bcd560e2218f5e31d5223a2e12a1425d495",
"sha256:a4cc09489843c70b22e8373ca3dfa52b3fab778b57cf81462f1203b0852e95e3",
"sha256:a61dc29cfca9831a03442a21d4b5fd77e3067beca4b5f81f1a89a04a71cf93fa",
"sha256:b1853df739b32fa913cc59ad9137caa9cc3d97ff871e2bbd89c2a2a1d4a69451",
"sha256:b1f44c335532c0581b77491b7715a871d0dd72e97487ac0f57337ccf3ab3469b",
"sha256:b261e0cb0d6faa8fd6863af26d30351fd2ffdb15b82e51e81e96b9e9e2e7ba16",
"sha256:c857ae5dba375ea26a6228f98c195fec0898a0fd91bcf0e8a0cae6d9faf3eca7",
"sha256:cf5bb4a7d53a71bb6a0144d31df784a973b36d8687d615ef6a7e9b1809917a9b",
"sha256:db9814ff0457b46f2e1d494c1efa4111ca089e08c8b983635ebffb9c1573361f",
"sha256:df04f4bad8a359daa2ff74f8108ea051670cafbca533bb2636c58b16e962989e",
"sha256:ecf81720934a0e18526177e645cbd6a8a21bb0ddc887ff9738de07a1df5c6b61",
"sha256:edfa6fba9157e0e3be0f40168eb142511012683ac3dc82420bee4a3f3981b30e"
],
"index": "pypi",
"version": "==1.15.4"
},
"pandas": {
"hashes": [
"sha256:11975fad9edbdb55f1a560d96f91830e83e29bed6ad5ebf506abda09818eaf60",
"sha256:12e13d127ca1b585dd6f6840d3fe3fa6e46c36a6afe2dbc5cb0b57032c902e31",
"sha256:1c87fcb201e1e06f66e23a61a5fea9eeebfe7204a66d99df24600e3f05168051",
"sha256:242e9900de758e137304ad4b5663c2eff0d798c2c3b891250bd0bd97144579da",
"sha256:26c903d0ae1542890cb9abadb4adcb18f356b14c2df46e4ff657ae640e3ac9e7",
"sha256:2e1e88f9d3e5f107b65b59cd29f141995597b035d17cc5537e58142038942e1a",
"sha256:31b7a48b344c14691a8e92765d4023f88902ba3e96e2e4d0364d3453cdfd50db",
"sha256:4fd07a932b4352f8a8973761ab4e84f965bf81cc750fb38e04f01088ab901cb8",
"sha256:5b24ca47acf69222e82530e89111dd9d14f9b970ab2cd3a1c2c78f0c4fbba4f4",
"sha256:647b3b916cc8f6aeba240c8171be3ab799c3c1b2ea179a3be0bd2712c4237553",
"sha256:66b060946046ca27c0e03e9bec9bba3e0b918bafff84c425ca2cc2e157ce121e",
"sha256:6efa9fa6e1434141df8872d0fa4226fc301b17aacf37429193f9d70b426ea28f",
"sha256:be4715c9d8367e51dbe6bc6d05e205b1ae234f0dc5465931014aa1c4af44c1ba",
"sha256:bea90da782d8e945fccfc958585210d23de374fa9294a9481ed2abcef637ebfc",
"sha256:d318d77ab96f66a59e792a481e2701fba879e1a453aefeebdb17444fe204d1ed",
"sha256:d785fc08d6f4207437e900ffead930a61e634c5e4f980ba6d3dc03c9581748c7",
"sha256:de9559287c4fe8da56e8c3878d2374abc19d1ba2b807bfa7553e912a8e5ba87c",
"sha256:f4f98b190bb918ac0bc0e3dd2ab74ff3573da9f43106f6dba6385406912ec00f",
"sha256:f71f1a7e2d03758f6e957896ed696254e2bc83110ddbc6942018f1a232dd9dad",
"sha256:fb944c8f0b0ab5c1f7846c686bc4cdf8cde7224655c12edcd59d5212cd57bec0"
],
"index": "pypi",
"version": "==0.23.4"
},
"pandocfilters": {
"hashes": [
"sha256:b3dd70e169bb5449e6bc6ff96aea89c5eea8c5f6ab5e207fc2f521a2cf4a0da9"
],
"version": "==1.4.2"
},
"parso": {
"hashes": [
"sha256:895c63e93b94ac1e1690f5fdd40b65f07c8171e3e53cbd7793b5b96c0e0a7f24"
],
"version": "==0.3.1"
},
"pickleshare": {
"hashes": [
"sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca",
"sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"
],
"version": "==0.7.5"
},
"plotly": {
"hashes": [
"sha256:53c647fdb28590de838678029f7d8fdc42f5ba4643d13c2afd2c4e4d56e18426",
"sha256:5dc85bde91bc80fa05f0d89e9f3a8eaee735b2b404047266874e0ff9c104407f"
],
"index": "pypi",
"version": "==3.4.1"
},
"prometheus-client": {
"hashes": [
"sha256:046cb4fffe75e55ff0e6dfd18e2ea16e54d86cc330f369bebcc683475c8b68a9"
],
"version": "==0.4.2"
},
"prompt-toolkit": {
"hashes": [
"sha256:c1d6aff5252ab2ef391c2fe498ed8c088066f66bc64a8d5c095bbf795d9fec34",
"sha256:d4c47f79b635a0e70b84fdb97ebd9a274203706b1ee5ed44c10da62755cf3ec9",
"sha256:fd17048d8335c1e6d5ee403c3569953ba3eb8555d710bfc548faf0712666ea39"
],
"version": "==2.0.7"
},
"pygments": {
"hashes": [
"sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d",
"sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc"
],
"version": "==2.2.0"
},
"pyparsing": {
"hashes": [
"sha256:40856e74d4987de5d01761a22d1621ae1c7f8774585acae358aa5c5936c6c90b",
"sha256:f353aab21fd474459d97b709e527b5571314ee5f067441dc9f88e33eecd96592"
],
"version": "==2.3.0"
},
"pyproj": {
"file": "https://download.lfd.uci.edu/pythonlibs/h2ufg7oq/pyproj-1.9.5.1-cp37-cp37m-win_amd64.whl",
"hashes": [
"sha256:2b8d0e937e1fa28b65bb351930ab2df9b5bd78e4cc953f7a5a415ff206a3acde"
],
"index": "pypi",
"version": "==1.9.5.1"
},
"pyrsistent": {
"hashes": [
"sha256:05910b7ff43cec0a853c15da0bfaf2867faa95f29b08e71f5846a195f1f38c75"
],
"version": "==0.14.7"
},
"python-dateutil": {
"hashes": [
"sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93",
"sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"
],
"version": "==2.7.5"
},
"pytz": {
"hashes": [
"sha256:31cb35c89bd7d333cd32c5f278fca91b523b0834369e757f4c5641ea252236ca",
"sha256:8e0f8568c118d3077b46be7d654cc8167fa916092e28320cde048e54bfc9f1e6"
],
"version": "==2018.7"
},
"pywinpty": {
"hashes": [
"sha256:79f2b4584111e36826e587d33eb4e7416a12ae1d6c094cb554e873c5c162fa5f"
],
"markers": "os_name == 'nt'",
"version": "==0.5.4"
},
"pyyaml": {
"hashes": [
"sha256:254bf6fda2b7c651837acb2c718e213df29d531eebf00edb54743d10bcb694eb",
"sha256:3108529b78577327d15eec243f0ff348a0640b0c3478d67ad7f5648f93bac3e2",
"sha256:3c17fb92c8ba2f525e4b5f7941d850e7a48c3a59b32d331e2502a3cdc6648e76",
"sha256:8d6d96001aa7f0a6a4a95e8143225b5d06e41b1131044913fecb8f85a125714b",
"sha256:c8a88edd93ee29ede719080b2be6cb2333dfee1dccba213b422a9c8e97f2967b"
],
"version": "==4.2b4"
},
"pyzmq": {
"hashes": [
"sha256:25a0715c8f69cf72f67cfe5a68a3f3ed391c67c063d2257bec0fe7fc2c7f08f8",
"sha256:2bab63759632c6b9e0d5bf19cc63c3b01df267d660e0abcf230cf0afaa966349",
"sha256:30ab49d99b24bf0908ebe1cdfa421720bfab6f93174e4883075b7ff38cc555ba",
"sha256:32c7ca9fc547a91e3c26fc6080b6982e46e79819e706eb414dd78f635a65d946",
"sha256:41219ae72b3cc86d97557fe5b1ef5d1adc1057292ec597b50050874a970a39cf",
"sha256:4b8c48a9a13cea8f1f16622f9bd46127108af14cd26150461e3eab71e0de3e46",
"sha256:55724997b4a929c0d01b43c95051318e26ddbae23565018e138ae2dc60187e59",
"sha256:65f0a4afae59d4fc0aad54a917ab599162613a761b760ba167d66cc646ac3786",
"sha256:6f88591a8b246f5c285ee6ce5c1bf4f6bd8464b7f090b1333a446b6240a68d40",
"sha256:75022a4c60dcd8765bb9ca32f6de75a0ec83b0d96e0309dc479f4c7b21f26cb7",
"sha256:76ea493bfab18dcb090d825f3662b5612e2def73dffc196d51a5194b0294a81d",
"sha256:7b60c045b80709e4e3c085bab9b691e71761b44c2b42dbb047b8b498e7bc16b3",
"sha256:8e6af2f736734aef8ed6f278f9f552ec7f37b1a6b98e59b887484a840757f67d",
"sha256:9ac2298e486524331e26390eac14e4627effd3f8e001d4266ed9d8f1d2d31cce",
"sha256:9ba650f493a9bc1f24feca1d90fce0e5dd41088a252ac9840131dfbdbf3815ca",
"sha256:a02a4a385e394e46012dc83d2e8fd6523f039bb52997c1c34a2e0dd49ed839c1",
"sha256:a3ceee84114d9f5711fa0f4db9c652af0e4636c89eabc9b7f03a3882569dd1ed",
"sha256:a72b82ac1910f2cf61a49139f4974f994984475f771b0faa730839607eeedddf",
"sha256:ab136ac51027e7c484c53138a0fab4a8a51e80d05162eb7b1585583bcfdbad27",
"sha256:c095b224300bcac61e6c445e27f9046981b1ac20d891b2f1714da89d34c637c8",
"sha256:c5cc52d16c06dc2521340d69adda78a8e1031705924e103c0eb8fc8af861d810",
"sha256:d612e9833a89e8177f8c1dc68d7b4ff98d3186cd331acd616b01bbdab67d3a7b",
"sha256:e828376a23c66c6fe90dcea24b4b72cd774f555a6ee94081670872918df87a19",
"sha256:e9767c7ab2eb552796440168d5c6e23a99ecaade08dda16266d43ad461730192",
"sha256:ebf8b800d42d217e4710d1582b0c8bff20cdcb4faad7c7213e52644034300924"
],
"version": "==17.1.2"
},
"qtconsole": {
"hashes": [
"sha256:1ac4a65e81a27b0838330a6d351c2f8435d4013d98a95373e8a41119b2968390",
"sha256:bc1ba15f50c29ed50f1268ad823bb6543be263c18dd093b80495e9df63b003ac"
],
"version": "==4.4.3"
},
"requests": {
"hashes": [
"sha256:65b3a120e4329e33c9889db89c80976c5272f56ea92d3e74da8a463992e3ff54",
"sha256:ea881206e59f41dbd0bd445437d792e43906703fff75ca8ff43ccdb11f33f263"
],
"version": "==2.20.1"
},
"retrying": {
"hashes": [
"sha256:08c039560a6da2fe4f2c426d0766e284d3b736e355f8dd24b37367b0bb41973b"
],
"version": "==1.3.3"
},
"scipy": {
"hashes": [
"sha256:0611ee97296265af4a21164a5323f8c1b4e8e15c582d3dfa7610825900136bb7",
"sha256:08237eda23fd8e4e54838258b124f1cd141379a5f281b0a234ca99b38918c07a",
"sha256:0e645dbfc03f279e1946cf07c9c754c2a1859cb4a41c5f70b25f6b3a586b6dbd",
"sha256:0e9bb7efe5f051ea7212555b290e784b82f21ffd0f655405ac4f87e288b730b3",
"sha256:108c16640849e5827e7d51023efb3bd79244098c3f21e4897a1007720cb7ce37",
"sha256:340ef70f5b0f4e2b4b43c8c8061165911bc6b2ad16f8de85d9774545e2c47463",
"sha256:3ad73dfc6f82e494195144bd3a129c7241e761179b7cb5c07b9a0ede99c686f3",
"sha256:3b243c77a822cd034dad53058d7c2abf80062aa6f4a32e9799c95d6391558631",
"sha256:404a00314e85eca9d46b80929571b938e97a143b4f2ddc2b2b3c91a4c4ead9c5",
"sha256:423b3ff76957d29d1cce1bc0d62ebaf9a3fdfaf62344e3fdec14619bb7b5ad3a",
"sha256:42d9149a2fff7affdd352d157fa5717033767857c11bd55aa4a519a44343dfef",
"sha256:625f25a6b7d795e8830cb70439453c9f163e6870e710ec99eba5722775b318f3",
"sha256:698c6409da58686f2df3d6f815491fd5b4c2de6817a45379517c92366eea208f",
"sha256:729f8f8363d32cebcb946de278324ab43d28096f36593be6281ca1ee86ce6559",
"sha256:8190770146a4c8ed5d330d5b5ad1c76251c63349d25c96b3094875b930c44692",
"sha256:878352408424dffaa695ffedf2f9f92844e116686923ed9aa8626fc30d32cfd1",
"sha256:8b984f0821577d889f3c7ca8445564175fb4ac7c7f9659b7c60bef95b2b70e76",
"sha256:8f841bbc21d3dad2111a94c490fb0a591b8612ffea86b8e5571746ae76a3deac",
"sha256:c22b27371b3866c92796e5d7907e914f0e58a36d3222c5d436ddd3f0e354227a",
"sha256:d0cdd5658b49a722783b8b4f61a6f1f9c75042d0e29a30ccb6cacc9b25f6d9e2",
"sha256:d40dc7f494b06dcee0d303e51a00451b2da6119acbeaccf8369f2d29e28917ac",
"sha256:d8491d4784aceb1f100ddb8e31239c54e4afab8d607928a9f7ef2469ec35ae01",
"sha256:dfc5080c38dde3f43d8fbb9c0539a7839683475226cf83e4b24363b227dfe552",
"sha256:e24e22c8d98d3c704bb3410bce9b69e122a8de487ad3dbfe9985d154e5c03a40",
"sha256:e7a01e53163818d56eabddcafdc2090e9daba178aad05516b20c6591c4811020",
"sha256:ee677635393414930541a096fc8e61634304bb0153e4e02b75685b11eba14cae",
"sha256:f0521af1b722265d824d6ad055acfe9bd3341765735c44b5a4d0069e189a0f40",
"sha256:f25c281f12c0da726c6ed00535ca5d1622ec755c30a3f8eafef26cf43fede694"
],
"index": "pypi",
"version": "==1.1.0"
},
"send2trash": {
"hashes": [
"sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2",
"sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"
],
"version": "==1.5.0"
},
"shapely": {
"hashes": [
"sha256:045e991636787c22bf3e18b57cdaa200681acc0e5db0720123643909d99ad32b",
"sha256:2e8398aacf67cfdfcd64154738c809fea52008afefb4704103f43face369230d",
"sha256:56b8184ef9cf2e2e1dd09ccfe341028af08ea57254524c9458e7f115655385af",
"sha256:7268fd767dc88ef083a528a1e8977a358c7a56cb349aae9e4c36913cfba30857",
"sha256:7e06705e0a20e10f0ce35b233b32b57f6b77044e58e2ad4023d6e64f6c3719a7",
"sha256:937502b7f7bfea39910e30617a30d74ce1b6585895b3d8a2a4602c223a0dd73c",
"sha256:99dc867fe6519c1af1840cceea8bcf5dd1ece077207bdcb19072cdb4fbda8584",
"sha256:9e45485c49fd9ee81a81be756e648a0c1c125e770e3ed42845350d75a46723ad",
"sha256:e3c3eb85f7d4308ccbfcdd23513bfe201b193673c98400219b9a480b903b3033",
"sha256:eb4f295b1ff558857d8061ff7716b1e10ec3c24b5b784bccb51dc87e6fd3ad07",
"sha256:f87c677c0b176827167d1ebad37bba36a9e6baf61f608ff8ef4b9d9ff002c3c3",
"sha256:ffe14cf22da9c95aa87a287ddb96202e3cbb4ec1ec862050d9e4b114307fa206"
],
"index": "pypi",
"version": "==1.7a1"
},
"six": {
"hashes": [
"sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",
"sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"
],
"version": "==1.11.0"
},
"terminado": {
"hashes": [
"sha256:55abf9ade563b8f9be1f34e4233c7b7bde726059947a593322e8a553cc4c067a",
"sha256:65011551baff97f5414c67018e908110693143cfbaeb16831b743fe7cad8b927"
],
"version": "==0.8.1"
},
"testpath": {
"hashes": [
"sha256:46c89ebb683f473ffe2aab0ed9f12581d4d078308a3cb3765d79c6b2317b0109",
"sha256:b694b3d9288dbd81685c5d2e7140b81365d46c29f5db4bc659de5aa6b98780f8"
],
"version": "==0.4.2"
},
"toml": {
"hashes": [
"sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c",
"sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"
],
"version": "==0.10.0"
},
"tornado": {
"hashes": [
"sha256:0662d28b1ca9f67108c7e3b77afabfb9c7e87bde174fbda78186ecedc2499a9d",
"sha256:4e5158d97583502a7e2739951553cbd88a72076f152b4b11b64b9a10c4c49409",
"sha256:732e836008c708de2e89a31cb2fa6c0e5a70cb60492bee6f1ea1047500feaf7f",
"sha256:8154ec22c450df4e06b35f131adc4f2f3a12ec85981a203301d310abf580500f",
"sha256:8e9d728c4579682e837c92fdd98036bd5cdefa1da2aaf6acf26947e6dd0c01c5",
"sha256:d4b3e5329f572f055b587efc57d29bd051589fb5a43ec8898c77a47ec2fa2bbb",
"sha256:e5f2585afccbff22390cddac29849df463b252b711aa2ce7c5f3f342a5b3b444"
],
"version": "==5.1.1"
},
"traitlets": {
"hashes": [
"sha256:9c4bd2d267b7153df9152698efb1050a5d84982d3384a37b2c1f7723ba3e7835",
"sha256:c6cb5e6f57c5a9bdaa40fa71ce7b4af30298fbab9ece9815b5d995ab6217c7d9"
],
"version": "==4.3.2"
},
"urllib3": {
"hashes": [
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
"sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
],
"version": "==1.24.1"
},
"wcwidth": {
"hashes": [
"sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
],
"version": "==0.1.7"
},
"webencodings": {
"hashes": [
"sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"
],
"version": "==0.5.1"
},
"widgetsnbextension": {
"hashes": [
"sha256:14b2c65f9940c9a7d3b70adbe713dbd38b5ec69724eebaba034d1036cf3d4740",
"sha256:fa618be8435447a017fd1bf2c7ae922d0428056cfc7449f7a8641edf76b48265"
],
"version": "==3.4.2"
}
},
"develop": {}
}

README.md

@@ -7,19 +7,47 @@ structure where possible. The analysis is done in python (look at the `/src/` fo
 Development is conducted using a [gitflow](https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow) approach. The `master` branch stores the official release history and the `develop` branch serves as an integration branch for features. Other `hotfix` and `feature` branches should be created and merged as necessary.
-## Where to start?
-Check .env
-Uses pipenv
-1. Clone this repository.
-2. Pull data from WRL coastal J drive with `make pull-data`
-3. Check out jupyter notebook `./notebooks/01_exploration.ipynb` which has an example of how to import the data and some interactive widgets.
-## Requirements
+## How to start?
+#### Getting software requirements
 The following requirements are needed to run various bits:
-- [Python 3.6+](https://conda.io/docs/user-guide/install/windows.html): Used for processing and analysing data. Jupyter notebooks are used for exploratory analyis and communication.
+- [Anaconda](https://www.anaconda.com/download/): Used for processing and analysing data. The Anaconda distribution is used for managing environments and is available for Windows, Mac and Linux. Jupyter notebooks are used for exploratory analysis and communication.
 - [QGIS](https://www.qgis.org/en/site/forusers/download): Used for looking at raw LIDAR pre/post storm surveys and extracting dune crests/toes
 - [rclone](https://rclone.org/downloads/): Data is not tracked by this repository, but is backed up to a remote Chris Leaman working directory located on the WRL coastal drive. Rclone is used to sync local and remote copies. Ensure rclone.exe is located on your `PATH` environment.
 - [gnuMake](http://gnuwin32.sourceforge.net/packages/make.htm): A list of commands for processing data is provided in the `./Makefile`. Use gnuMake to launch these commands. Ensure make.exe is located on your `PATH` environment.
+- git
+#### Getting the repository
+Clone the repository into your local environment:
+```
+git clone http://git.wrl.unsw.edu.au:3000/chrisl/nsw-2016-storm-impact.git
+cd nsw-2016-storm-impact
+```
+#### Getting the python environment set up
+Commands for setting up the python environment are provided in the `Makefile`. Simply run the following commands in the repo root directory:
+```
+make venv-init
+make venv-activate
+make venv-requirements-install
+```
+You can see what these commands are actually running by inspecting the `Makefile`.
+#### Pull data
+The actual raw, interim and processed data are not tracked by the repository as part of good git practices. A copy of the raw data is stored on the WRL Coastal J:\ drive and can be copied using the following command.
+```
+make pull-data
+```
+If you have updated the data and want to copy it back to the J:\ drive, use the following command. Note that it is probably not a good idea to modify data stored in `./data/raw`.
+```
+make push-data
+```
+#### View notebooks
+Jupyter notebooks have been set up to help explore the data. Once you have set up your environment and pulled the data, this is probably a good place to start. To run the notebooks, use the following command and navigate to the `./notebooks` folder.
+```
+jupyter notebook
+```
 ## Available data
 Raw, interim and processed data used in this analysis is kept in the `/data/` folder. Data is not tracked in the repository due to size constraints, but stored locally. A mirror is kept of the coastal folder J drive which you can
@@ -44,4 +72,5 @@ been corrected for systematic errors, so actual elevations should be taken from
 - [ ] Implement [bayesian change detection algorithm](https://github.com/hildensia/bayesian_changepoint_detection) to help detect dune crests and toes from profiles. Probably low priority at the moment since we are doing manual detection.
 - [ ] Implement dune impact calculations as per Palmsten & Holman. Calculation should be done in a new dataframe.
 - [ ] Implement data/interim/*.csv file checking using py.test. Check for correct columns, number of nans etc. Testing of code is probably a lower priority than just checking the interim data files at the moment.
 - [ ] Investigate using [modin](https://github.com/modin-project/modin) to help speed up analysis.
+- [ ] Need to think about how relative imports are handled, see [here](https://chrisyeh96.github.io/2017/08/08/definitive-guide-python-imports.html). Maybe the click CLI interface should be moved to the `./src/` folder and it can import all the other packages?

environment.yml

@@ -0,0 +1,149 @@
name: C:\Users\z5189959\Desktop\nsw-2016-storm-impact\.venv
channels:
- defaults
- conda-forge
dependencies:
- appdirs=1.4.3=py_1
- attrs=18.2.0=py_0
- backcall=0.1.0=py_0
- black=18.9b0=py_0
- bleach=3.0.2=py_0
- boost=1.66.0=py36_vc14_1
- boost-cpp=1.66.0=vc14_1
- ca-certificates=2018.10.15=ha4d7672_0
- certifi=2018.10.15=py36_1000
- colorama=0.4.0=py_0
- colorlover=0.2.1=py_0
- curl=7.60.0=vc14_0
- entrypoints=0.2.3=py36_1002
- expat=2.2.5=vc14_0
- freetype=2.8.1=vc14_0
- freexl=1.0.2=vc14_2
- geotiff=1.4.2=vc14_1
- hdf4=4.2.13=vc14_0
- hdf5=1.10.1=vc14_2
- icu=58.2=vc14_0
- ipykernel=5.1.0=py36h39e3cac_1001
- ipython=7.1.1=py36h39e3cac_1000
- jedi=0.13.1=py36_1000
- jinja2=2.10=py_1
- jpeg=9b=vc14_2
- jupyter_client=5.2.3=py_1
- jupyter_contrib_core=0.3.3=py_2
- jupyter_contrib_nbextensions=0.5.0=py36_1000
- jupyter_highlight_selected_word=0.2.0=py36_1000
- jupyter_latex_envs=1.4.4=py36_1000
- jupyter_nbextensions_configurator=0.4.0=py36_1000
- kealib=1.4.7=vc14_4
- krb5=1.14.6=vc14_0
- libgdal=2.2.4=vc14_5
- libiconv=1.14=vc14_4
- libnetcdf=4.6.1=vc14_2
- libpng=1.6.34=vc14_0
- libpq=9.6.3=vc14_0
- libsodium=1.0.16=vc14_0
- libspatialite=4.3.0a=vc14_19
- libtiff=4.0.9=vc14_0
- libxml2=2.9.5=vc14_1
- libxslt=1.1.32=vc14_0
- lxml=4.2.3=py36heafd4d3_0
- markupsafe=1.1.0=py36hfa6e2cd_1000
- matplotlib=2.2.2=py36_1
- mistune=0.8.4=py36hfa6e2cd_1000
- nbconvert=5.3.1=py_1
- notebook=5.7.2=py36_1000
- openjpeg=2.3.0=vc14_2
- openssl=1.0.2p=hfa6e2cd_1001
- pandoc=2.4=0
- pandocfilters=1.4.2=py_1
- parso=0.3.1=py_0
- pickleshare=0.7.5=py36_1000
- proj4=4.9.3=vc14_5
- prometheus_client=0.4.2=py_0
- prompt_toolkit=2.0.7=py_0
- pygments=2.2.0=py_1
- python=3.6.6=he025d50_0
- pywinpty=0.5.4=py36_1002
- pyzmq=17.1.2=py36hf576995_1001
- qt=5.6.2=vc14_1
- send2trash=1.5.0=py_0
- sqlite=3.20.1=vc14_2
- terminado=0.8.1=py36_1001
- testpath=0.4.2=py36_1000
- tk=8.6.8=vc14_0
- toml=0.10.0=py_0
- vc=14=0
- wcwidth=0.1.7=py_1
- webencodings=0.5.1=py_1
- winpty=0.4.3=4
- xerces-c=3.2.0=vc14_0
- yaml=0.1.7=vc14_0
- zeromq=4.2.5=vc14_2
- zlib=1.2.11=vc14_0
- asn1crypto=0.24.0=py36_0
- blas=1.0=mkl
- cffi=1.11.5=py36h74b6da3_1
- chardet=3.0.4=py36_1
- click=7.0=py36_0
- click-plugins=1.0.4=py36_0
- cligj=0.5.0=py36_0
- cryptography=2.3.1=py36h74b6da3_0
- cycler=0.10.0=py36h009560c_0
- decorator=4.3.0=py36_0
- fiona=1.7.10=py36h5bf8d1d_0
- gdal=2.2.2=py36hcebd033_1
- geos=3.6.2=h9ef7328_2
- icc_rt=2017.0.4=h97af966_0
- idna=2.7=py36_0
- intel-openmp=2019.1=144
- ipython_genutils=0.2.0=py36h3c5d0ee_0
- jsonschema=2.6.0=py36h7636477_0
- jupyter_core=4.4.0=py36_0
- kiwisolver=1.0.1=py36h6538335_0
- libboost=1.67.0=hd9e427e_4
- libcurl=7.61.1=h7602738_0
- libkml=1.3.0=he5f2a48_4
- libssh2=1.8.0=hd619d38_4
- m2w64-gcc-libgfortran=5.3.0=6
- m2w64-gcc-libs=5.3.0=7
- m2w64-gcc-libs-core=5.3.0=7
- m2w64-gmp=6.1.0=2
- m2w64-libwinpthread-git=5.0.0.4634.697f757=2
- mkl=2018.0.3=1
- mkl_fft=1.0.6=py36hdbbee80_0
- mkl_random=1.0.1=py36h77b88f5_1
- msys2-conda-epoch=20160418=1
- munch=2.3.2=py36_0
- nbformat=4.4.0=py36h3a5bc1b_0
- numpy=1.15.4=py36ha559c80_0
- numpy-base=1.15.4=py36h8128ebf_0
- pandas=0.23.4=py36h830ac7b_0
- pip=18.1=py36_0
- plotly=3.4.1=py36h28b3542_0
- pycparser=2.19=py36_0
- pyopenssl=18.0.0=py36_0
- pyparsing=2.3.0=py36_0
- pyproj=1.9.5.1=py36_0
- pyqt=5.6.0=py36_2
- pysocks=1.6.8=py36_0
- python-dateutil=2.7.5=py36_0
- pytz=2018.7=py36_0
- pyyaml=3.13=py36hfa6e2cd_0
- requests=2.20.1=py36_0
- retrying=1.3.3=py36_2
- scipy=1.1.0=py36h4f6bf74_1
- setuptools=40.6.2=py36_0
- shapely=1.6.4=py36hc90234e_0
- sip=4.19.8=py36h6538335_0
- six=1.11.0=py36_1
- tornado=5.1.1=py36hfa6e2cd_0
- traitlets=4.3.2=py36h096827d_0
- urllib3=1.23=py36_0
- vs2015_runtime=14.15.26706=h3a45250_0
- wheel=0.32.3=py36_0
- win_inet_pton=1.0.1=py36_1
- wincertstore=0.2=py36h7fe50ca_0
- xz=5.2.4=h2fa13f4_4
- pip:
- mat4py==0.4.1
prefix: C:\Users\z5189959\Desktop\nsw-2016-storm-impact\.venv

File diff suppressed because one or more lines are too long

src/analysis/forecast_twl.py

@@ -7,8 +7,7 @@ import numpy.ma as ma
 import pandas as pd
 from scipy import stats
-from src.analysis import runup_models
+import runup_models
 logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False)
 logger = logging.getLogger(__name__)
@@ -266,6 +265,7 @@ def crossings(profile_x, profile_z, constant_z):
     z = np.subtract(profile_z, constant_z)
     # Find all indices right before any crossing.
+    # TODO Sometimes this can give a runtime warning https://stackoverflow.com/a/36489085
     indicies = np.where(z[:-1] * z[1:] < 0)[0]
     # Use linear interpolation to find intersample crossings.
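The two comments in this hunk describe the whole algorithm: a sign change in `z` between consecutive samples marks a crossing, and linear interpolation then locates it between those samples. The sketch below shows one way the function can be completed; the visible lines above are reused verbatim, while the interpolation step is an assumption about the part of the function not shown in this diff.

```python
import numpy as np

def crossings(profile_x, profile_z, constant_z):
    """Return the x locations where profile_z crosses the elevation constant_z (sketch)."""
    profile_x = np.asarray(profile_x, dtype=float)
    z = np.subtract(profile_z, constant_z)

    # Indices right before a sign change, i.e. right before each crossing.
    # NaNs or exact zeros in z are what trigger the runtime warning noted in the TODO above.
    idx = np.where(z[:-1] * z[1:] < 0)[0]

    # Linear interpolation between samples i and i+1 to find each intersample crossing.
    return [
        profile_x[i] - z[i] * (profile_x[i + 1] - profile_x[i]) / (z[i + 1] - z[i])
        for i in idx
    ]
```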

src/analysis/observed_storm_impacts.py

@@ -48,6 +48,11 @@ def volume_change(df_profiles, df_profile_features, zone):
         if np.isnan(prestorm_dune_toe_x):
             prestorm_dune_toe_x = prestorm_dune_crest_x
+        # If no prestorm and poststorm profiles, skip site and continue
+        profile_lengths = [len(df_site.query("profile_type == '{}'".format(x))) for x in ['prestorm', 'poststorm']]
+        if any([length == 0 for length in profile_lengths]):
+            continue
         # Find last x coordinate where we have both prestorm and poststorm measurements. If we don't do this,
         # the prestorm and poststorm values are going to be calculated over different lengths.
         df_zone = df_site.dropna(subset=["z"])
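The comment about the last shared x coordinate is the key to comparing like with like: volumes must be integrated over the same horizontal extent for both surveys. The clipping step itself is not shown in this hunk; the following is a hedged, self-contained sketch of that idea, assuming profiles are indexed by (site_id, profile_type, x) as in the `pd.read_csv(..., index_col=[0, 1, 2])` call elsewhere in this diff. The site id "NARRA0001" and the toy numbers are illustrative only.

```python
import pandas as pd

# Toy stand-in for one site's profiles; structure and names are assumptions.
df_site = pd.DataFrame(
    {"z": [3.0, 2.0, 1.0, 3.1, 2.2, None]},
    index=pd.MultiIndex.from_tuples(
        [("NARRA0001", "prestorm", 0), ("NARRA0001", "prestorm", 10), ("NARRA0001", "prestorm", 20),
         ("NARRA0001", "poststorm", 0), ("NARRA0001", "poststorm", 10), ("NARRA0001", "poststorm", 20)],
        names=["site_id", "profile_type", "x"],
    ),
)

# Drop empty measurements, then find the last x present in both surveys.
df_zone = df_site.dropna(subset=["z"])
x_last = min(
    df_zone.query("profile_type == '{}'".format(p)).index.get_level_values("x").max()
    for p in ["prestorm", "poststorm"]
)

# Clip both profiles to that shared extent before comparing volumes.
df_zone = df_zone[df_zone.index.get_level_values("x") <= x_last]
print(df_zone)
```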
@@ -128,25 +133,26 @@ def storm_regime(df_observed_impacts):
     return df_observed_impacts
-if __name__ == "__main__":
-    logger.info("Importing existing data")
-    data_folder = "./data/interim"
-    df_profiles = pd.read_csv(os.path.join(data_folder, "profiles.csv"), index_col=[0, 1, 2])
-    df_profile_features = pd.read_csv(os.path.join(data_folder, "profile_features.csv"), index_col=[0])
-    logger.info("Creating new dataframe for observed impacts")
-    df_observed_impacts = pd.DataFrame(index=df_profile_features.index)
-    logger.info("Getting pre/post storm volumes")
-    df_swash_vol_changes = volume_change(df_profiles, df_profile_features, zone="swash")
-    df_dune_face_vol_changes = volume_change(df_profiles, df_profile_features, zone="dune_face")
-    df_observed_impacts = df_observed_impacts.join([df_swash_vol_changes, df_dune_face_vol_changes])
-    # Classify regime based on volume changes
-    df_observed_impacts = storm_regime(df_observed_impacts)
-    # Save dataframe to csv
-    df_observed_impacts.to_csv(os.path.join(data_folder, "impacts_observed.csv"))
+#
+# if __name__ == "__main__":
+#     logger.info("Importing existing data")
+#     data_folder = "./data/interim"
+#     df_profiles = pd.read_csv(os.path.join(data_folder, "profiles.csv"), index_col=[0, 1, 2])
+#     df_profile_features = pd.read_csv(os.path.join(data_folder, "profile_features.csv"), index_col=[0])
+#
+#     logger.info("Creating new dataframe for observed impacts")
+#     df_observed_impacts = pd.DataFrame(index=df_profile_features.index)
+#
+#     logger.info("Getting pre/post storm volumes")
+#     df_swash_vol_changes = volume_change(df_profiles, df_profile_features, zone="swash")
+#     df_dune_face_vol_changes = volume_change(df_profiles, df_profile_features, zone="dune_face")
+#     df_observed_impacts = df_observed_impacts.join([df_swash_vol_changes, df_dune_face_vol_changes])
+#
+#     # Classify regime based on volume changes
+#     df_observed_impacts = storm_regime(df_observed_impacts)
+#
+#     # Save dataframe to csv
+#     df_observed_impacts.to_csv(os.path.join(data_folder, "impacts_observed.csv"))
 @click.command()

@ -2,15 +2,20 @@
Converts .csv files to .shape files Converts .csv files to .shape files
""" """
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import click import click
import fiona import fiona
import pandas as pd import pandas as pd
from fiona.crs import from_epsg from fiona.crs import from_epsg
from shapely.geometry import Point, mapping from shapely.geometry import Point, mapping
import logging.config
logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False) from utils import setup_logging
logger = logging.getLogger(__name__)
logger = setup_logging()
@click.command() @click.command()
@ -25,11 +30,11 @@ def sites_csv_to_shp(input_csv, output_shp):
""" """
logger.info("Converting %s to %s", input_csv, output_shp) logger.info("Converting %s to %s", input_csv, output_shp)
df_sites = pd.read_csv(input_csv, index_col=[0]) df_sites = pd.read_csv(input_csv, index_col=[0])
logger.info(os.environ.get("GDAL_DATA", None))
schema = {"geometry": "Point", "properties": {"beach": "str", "site_id": "str"}} schema = {"geometry": "Point", "properties": {"beach": "str", "site_id": "str"}}
with fiona.open(output_shp, "w", crs=from_epsg(4326), driver="ESRI Shapefile", schema=schema) as output: with fiona.open(output_shp, "w", crs=from_epsg(4326), driver="ESRI Shapefile", schema=schema) as output:
for index, row in df_sites.iterrows(): for index, row in df_sites.iterrows():
point = Point(row["lon"], row["lat"]) point = Point(row["x_200_lon"], row["x_200_lat"])
prop = {"beach": row["beach"], "site_id": index} prop = {"beach": row["beach"], "site_id": index}
output.write({"geometry": mapping(point), "properties": prop}) output.write({"geometry": mapping(point), "properties": prop})
logger.info("Done!") logger.info("Done!")

@ -2,15 +2,25 @@
Converts raw .mat files into a flattened .csv structure which can be imported into python pandas. Converts raw .mat files into a flattened .csv structure which can be imported into python pandas.
""" """
import logging.config import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from datetime import datetime, timedelta from datetime import datetime, timedelta
import math
import click import click
import numpy as np
import pandas as pd import pandas as pd
from mat4py import loadmat from mat4py import loadmat
import numpy as np from shapely.geometry import Point
from profile_features import convert_coord_systems
from utils import setup_logging
logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False) logger = setup_logging()
logger = logging.getLogger(__name__)
def parse_orientations(orientations_mat): def parse_orientations(orientations_mat):
@ -134,7 +144,7 @@ def parse_tides(tides_mat):
return df return df
def parse_profiles(profiles_mat): def parse_profiles_and_sites(profiles_mat):
""" """
Parses the raw profiles.mat file and returns a pandas dataframe Parses the raw profiles.mat file and returns pandas dataframes of the profiles and sites
:param tides_mat: :param profiles_mat:
@ -142,39 +152,108 @@ def parse_profiles(profiles_mat):
""" """
logger.info("Parsing %s", profiles_mat) logger.info("Parsing %s", profiles_mat)
mat_data = loadmat(profiles_mat)["data"] mat_data = loadmat(profiles_mat)["data"]
rows = [] profile_rows = []
for i in range(0, len(mat_data["site"])): site_rows = []
for j in range(0, len(mat_data["pfx"][i])): site_counter = 0
for profile_type in ["prestorm", "poststorm"]:
for i, site in enumerate(mat_data["site"]):
if profile_type == "prestorm":
z = mat_data["pf1"][i][j][0] # Give each site a unique id
if profile_type == "poststorm": if len(site_rows) == 0 or site_rows[-1]["beach"] != site:
z = mat_data["pf2"][i][j][0] site_counter = 1
else:
rows.append( site_counter += 1
{ site_id = "{}{:04d}".format(site, site_counter)
"beach": mat_data["site"][i],
"lon": mat_data["lon"][i], # Initalize location of x=200m latitude and longitude
"lat": mat_data["lat"][i], x_200_lat = np.nan
"profile_type": profile_type, x_200_lon = np.nan
"x": mat_data["pfx"][i][j][0],
"z": z, # Want to calculation the orientation
} orientation = {}
)
for x, lat, lon, z_prestorm, z_poststorm, easting, northing in zip(
mat_data["x"][i],
mat_data["lats"][i],
mat_data["lons"][i],
mat_data["Zpre"][i],
mat_data["Zpost"][i],
mat_data["eastings"][i],
mat_data["northings"][i],
):
# Only extract pre and post storm profile
for j, profile_type in enumerate(["prestorm", "poststorm"]):
if mat_data["isgood"][i][j] == 1:
land_lim = mat_data["landlims"][i][j]
survey_datetime = matlab_datenum_to_datetime(mat_data["surveydates"][i][j])
if profile_type == "prestorm":
z = z_prestorm
else:
z = z_poststorm
# Keep a record of where the center of the profile is located, and the locations of the land
# and sea ends of the profile
# TODO: This code isn't very transferable. What if we don't have lat/lons at 200 m? Revisit this
if x[0] == 200:
x_200_lat = lat[0]
x_200_lon = lon[0]
elif x[0] == 0:
orientation["land_easting"] = easting[0]
orientation["land_northing"] = northing[0]
elif x[0] == 400:
orientation["sea_easting"] = easting[0]
orientation["sea_northing"] = northing[0]
profile_rows.append(
{
"site_id": site_id,
"lon": lon[0],
"lat": lat[0],
"profile_type": profile_type,
"x": x[0],
"z": z[0],
"land_lim": land_lim,
"survey_datetime": survey_datetime,
}
)
orientation = math.degrees(
math.atan2(
orientation["land_northing"] - orientation["sea_northing"],
orientation["land_easting"] - orientation["sea_easting"],
)
)
site_rows.append(
{
"site_id": site_id,
"beach": site,
"lat": x_200_lat,
"lon": x_200_lon,
"orientation": orientation,
"profile_x_lat_lon": 200,
}
)
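# Worked example of the orientation convention above (coordinates hypothetical):
# atan2 takes (northing difference, easting difference), so it returns the
# bearing of the sea-to-land vector measured anticlockwise from east. If the
# landward (x=0) end of the profile lies due west of the seaward (x=400) end,
# land_easting < sea_easting with equal northings, and
# math.degrees(math.atan2(0, -1)) == 180.0.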
df = pd.DataFrame(rows) df_profiles = pd.DataFrame(profile_rows)
return df df_sites = pd.DataFrame(site_rows)
logger.info("Parsed profiles and sites")
return df_profiles, df_sites
def remove_zeros(df_profiles): def remove_zeros(df_profiles):
""" """
When parsing the pre/post storm profiles, the ends of some profiles have constant values of zero. Let's change When parsing the pre/post storm profiles, the ends of some profiles have constant values of zero. Let's change
these to NaNs for consistency. Didn't use pandas fillna because 0 may still be a valid value. these to NaNs for consistency. Didn't use pandas fillna because 0 may still be a valid value.
:param df: :param df_profiles:
:return: :return:
""" """
logger.info("Removing zeros from end of profiles")
df_profiles = df_profiles.sort_index() df_profiles = df_profiles.sort_index()
groups = df_profiles.groupby(level=["site_id", "profile_type"]) groups = df_profiles.groupby(level=["site_id", "profile_type"])
for key, _ in groups: for key, _ in groups:
@ -185,6 +264,7 @@ def remove_zeros(df_profiles):
df_profile = df_profiles[idx_site] df_profile = df_profiles[idx_site]
x_last_ele = df_profile[df_profile.z != 0].index.get_level_values("x")[-1] x_last_ele = df_profile[df_profile.z != 0].index.get_level_values("x")[-1]
df_profiles.loc[idx_site & (df_profiles.index.get_level_values("x") > x_last_ele), "z"] = np.nan df_profiles.loc[idx_site & (df_profiles.index.get_level_values("x") > x_last_ele), "z"] = np.nan
logger.info("Removed zeros from end of profiles")
return df_profiles return df_profiles
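For illustration (elevations hypothetical), a profile whose seaward tail was padded with zeros

    x:  0    10    20    30    40
    z:  3.2  1.1   0.4   0.0   0.0

becomes

    z:  3.2  1.1   0.4   NaN   NaN

because the last non-zero elevation sits at x = 20 and every point seaward of it is treated as padding rather than a real measurement.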
@ -198,31 +278,7 @@ def matlab_datenum_to_datetime(matlab_datenum):
return datetime.fromordinal(int(matlab_datenum)) + timedelta(days=matlab_datenum % 1) - timedelta(days=366) return datetime.fromordinal(int(matlab_datenum)) + timedelta(days=matlab_datenum % 1) - timedelta(days=366)
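As a quick sanity check on the 366-day offset (example value illustrative): MATLAB datenums make 1 January of year 0 day 1, while Python ordinals make 1 January of year 1 ordinal 1, so a MATLAB datenum is 366 larger than the Python ordinal for the same date.

    matlab_datenum_to_datetime(736500.5)
    # returns datetime.datetime(2016, 6, 19, 12, 0), i.e. noon on 19 June 2016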
def get_unique_sites(dfs, cols=["beach", "lat", "lon"]): def replace_unique_sites(df, df_sites):
"""
Generates a dataframe of unique sites based on beach names, lats and lons. Creates a unique site ID for each.
:param dfs:
:param cols:
:return:
"""
rows = []
df_all = pd.concat([df[cols] for df in dfs])
beach_groups = df_all.groupby(["beach"])
for beach_name, beach_group in beach_groups:
site_groups = beach_group.groupby(["lat", "lon"])
siteNo = 1
for site_name, site_group in site_groups:
site = "{}{:04d}".format(beach_name, siteNo)
rows.append({"site_id": site, "lat": site_name[0], "lon": site_name[1], "beach": beach_name})
siteNo += 1
df = pd.DataFrame(rows)
return df
def replace_unique_sites(df, df_sites, cols=["lat", "lon"]):
""" """
Replaces beach/lat/lon columns with the unique site_id Replaces beach/lat/lon columns with the unique site_id
:param dfs: :param df:
@ -232,56 +288,37 @@ def replace_unique_sites(df, df_sites, cols=["lat", "lon"]):
# Make the sites index a column, so it can be merged into df # Make the sites index a column, so it can be merged into df
df_sites["site_id"] = df_sites.index.get_level_values("site_id") df_sites["site_id"] = df_sites.index.get_level_values("site_id")
# Merging on a float can lead to subtle bugs. Lets convert lat/lons to integers and merge on that instead # Create eastings and northings so we can calculate distances
precision = 8 site_points = [convert_coord_systems(Point(lon, lat)).xy for lon, lat in zip(df_sites["lon"], df_sites["lat"])]
df_sites["lat_int"] = np.round(df_sites["lat"] * 10 ** precision).astype(np.int64) df_sites["easting"] = [x[0][0] for x in site_points]
df_sites["lon_int"] = np.round(df_sites["lon"] * 10 ** precision).astype(np.int64) df_sites["northing"] = [x[1][0] for x in site_points]
df["lat_int"] = np.round(df["lat"] * 10 ** precision).astype(np.int64)
df["lon_int"] = np.round(df["lon"] * 10 ** precision).astype(np.int64)
df_merged = df.merge(df_sites, on=["lat_int", "lon_int"]) # Process each unique combination lat/lons in groups
groups = df.groupby(["lat", "lon"])
for (lat, lon), df_group in groups:
# Check that all our records have a unique site identifier # Calculate distances from each point to each site and determine closest site
n_unmatched = len(df) - len(df_merged) easting, northing = [x[0] for x in convert_coord_systems(Point(lon, lat)).xy]
if n_unmatched > 0: distances_to_sites = np.sqrt((df_sites["easting"] - easting) ** 2 + (df_sites["northing"] - northing) ** 2)
logger.warning("Not all records (%d of %d) matched with a unique site", n_unmatched, len(df)) min_distance = distances_to_sites.min()
closest_site = distances_to_sites.idxmin()
df_merged = df_merged.drop(
columns=[
"lat_x",
"lon_x",
"lat_int",
"lon_int",
"beach_y",
"beach_x",
"lat_y",
"lon_y",
"orientation",
"profile_x_lat_lon",
]
)
return df_merged # Do some logging so we can check later.
if min_distance > 1:
logger.warning("Closest site to (%.4f,%.4f) is %s (%.2f m away)", lat, lon, closest_site, min_distance)
else:
logger.info("Closest site to (%.4f,%.4f) is %s (%.2f m away)", lat, lon, closest_site, min_distance)
# Assign site_id based on closest site
df.loc[df_group.index, "site_id"] = closest_site
@click.command(short_help="create sites.csv") nan_count = df.site_id.isna().sum()
@click.option("--waves-mat", required=True, help=".mat file containing wave records") if nan_count > 0:
@click.option("--tides-mat", required=True, help=".mat file containing tide records") logger.warning("Not all records (%d of %d) matched with a unique site", nan_count, len(df))
@click.option("--profiles-mat", required=True, help=".mat file containing beach profiles")
@click.option("--orientations-mat", required=True, help=".mat file containing orientation of beach profiles") df = df.drop(columns=["lat", "lon", "beach"])
@click.option("--output-file", required=True, help="where to save sites.csv")
def create_sites_csv(waves_mat, tides_mat, profiles_mat, orientations_mat, output_file): return df
logger.info("Creating %s", output_file)
df_waves = parse_waves(waves_mat=waves_mat)
df_tides = parse_tides(tides_mat=tides_mat)
df_profiles = parse_profiles(profiles_mat=profiles_mat)
df_orientations = parse_orientations(orientations_mat=orientations_mat)
df_sites = get_unique_sites(dfs=[df_waves, df_tides, df_profiles])
df_sites = combine_sites_and_orientaions(df_sites, df_orientations)
df_sites = specify_lat_lon_profile_center(df_sites)
df_sites.set_index(["site_id"], inplace=True)
df_sites.to_csv(output_file)
logger.info("Created %s", output_file)
@click.command(short_help="create waves.csv") @click.command(short_help="create waves.csv")
@ -301,17 +338,22 @@ def create_waves_csv(waves_mat, sites_csv, output_file):
@click.command(short_help="create profiles.csv") @click.command(short_help="create profiles.csv")
@click.option("--profiles-mat", required=True, help=".mat file containing beach profiles") @click.option("--profiles-mat", required=True, help=".mat file containing beach profiles")
@click.option("--sites-csv", required=True, help=".csv file description of cross section sites") @click.option("--profiles-output-file", required=True, help="where to save profiles.csv")
@click.option("--output-file", required=True, help="where to save profiles.csv") @click.option("--sites-output-file", required=True, help="where to save sites.csv")
def create_profiles_csv(profiles_mat, sites_csv, output_file): def create_sites_and_profiles_csv(profiles_mat, profiles_output_file, sites_output_file):
logger.info("Creating %s", output_file) logger.info("Creating sites and profiles csvs")
df_profiles = parse_profiles(profiles_mat=profiles_mat) df_profiles, df_sites = parse_profiles_and_sites(profiles_mat=profiles_mat)
df_sites = pd.read_csv(sites_csv, index_col=[0])
df_profiles = replace_unique_sites(df_profiles, df_sites)
df_profiles.set_index(["site_id", "profile_type", "x"], inplace=True) df_profiles.set_index(["site_id", "profile_type", "x"], inplace=True)
df_profiles.sort_index(inplace=True) df_profiles.sort_index(inplace=True)
df_profiles.to_csv(output_file) df_profiles = remove_zeros(df_profiles)
logger.info("Created %s", output_file)
df_sites.set_index(["site_id"], inplace=True)
df_sites.sort_index(inplace=True)
df_profiles.to_csv(profiles_output_file)
logger.info("Created %s", profiles_output_file)
df_sites.to_csv(sites_output_file)
logger.info("Created %s", sites_output_file)
@click.command(short_help="create profiles.csv") @click.command(short_help="create profiles.csv")
@ -335,8 +377,7 @@ def cli():
if __name__ == "__main__": if __name__ == "__main__":
cli.add_command(create_sites_csv)
cli.add_command(create_waves_csv) cli.add_command(create_waves_csv)
cli.add_command(create_profiles_csv) cli.add_command(create_sites_and_profiles_csv)
cli.add_command(create_tides_csv) cli.add_command(create_tides_csv)
cli() cli()

@ -15,22 +15,6 @@ logging.config.fileConfig("./src/logging.conf", disable_existing_loggers=False)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def shapes_from_shp(shp_file):
"""
Parses a shape file and returns a list of shapely shapes, ids and properties
:param shp_file:
:return:
"""
shapes = []
ids = []
properties = []
for feat in fiona.open(shp_file, "r"):
shapes.append(shape(feat["geometry"]))
ids.append(feat["id"])
properties.append(feat["properties"])
return shapes, ids, properties
def convert_coord_systems(g1, in_coord_system="EPSG:4326", out_coord_system="EPSG:28356"): def convert_coord_systems(g1, in_coord_system="EPSG:4326", out_coord_system="EPSG:28356"):
""" """
Converts coordinates from one coordinate system to another. Needed because shapefiles are usually defined in Converts coordinates from one coordinate system to another. Needed because shapefiles are usually defined in
@ -50,6 +34,22 @@ def convert_coord_systems(g1, in_coord_system="EPSG:4326", out_coord_system="EPS
return g2 return g2
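A minimal usage sketch with the defaults shown above (WGS84 lon/lat in, GDA94 / MGA zone 56 out; coordinates illustrative):

    point_wgs84 = Point(151.30, -33.72)                # lon, lat in EPSG:4326
    point_mga56 = convert_coord_systems(point_wgs84)
    easting, northing = point_mga56.x, point_mga56.y   # metres in EPSG:28356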
def shapes_from_shp(shp_file):
"""
Parses a shape file and returns a list of shapely shapes, ids and properties
:param shp_file:
:return:
"""
shapes = []
ids = []
properties = []
for feat in fiona.open(shp_file, "r"):
shapes.append(shape(feat["geometry"]))
ids.append(feat["id"])
properties.append(feat["properties"])
return shapes, ids, properties
def distance_to_intersection(lat, lon, landward_orientation, beach, line_strings, line_properties): def distance_to_intersection(lat, lon, landward_orientation, beach, line_strings, line_properties):
""" """
Returns the distance at which a line drawn from a lat/lon at an orientation intersects a line string Returns the distance at which a line drawn from a lat/lon at an orientation intersects a line string
@ -143,7 +143,7 @@ def parse_profile_features(df_sites, df_profiles, dune_crest_shp, dune_toe_shp):
lambda row: beach_profile_elevation(row["{}_x".format(feat)], df_profiles, "prestorm", row.name), axis=1 lambda row: beach_profile_elevation(row["{}_x".format(feat)], df_profiles, "prestorm", row.name), axis=1
) )
df_profile_features = df_profile_features.drop(columns=["beach", "lat", "lon", "orientation"]) df_profile_features = df_profile_features.drop(columns=["beach", "lat", "lon", "orientation", "profile_x_lat_lon"])
return df_profile_features return df_profile_features

@ -0,0 +1,50 @@
---
version: 1
disable_existing_loggers: False
formatters:
simple:
format: "%(asctime)s - %(filename)s - %(levelname)s - %(message)s"
handlers:
console:
class: logging.StreamHandler
level: DEBUG
formatter: simple
stream: ext://sys.stdout
info_file_handler:
class: logging.handlers.RotatingFileHandler
level: INFO
formatter: simple
filename: info.log
maxBytes: 10485760 # 10MB
backupCount: 3
encoding: utf8
warning_file_handler:
class: logging.handlers.RotatingFileHandler
level: WARNING
formatter: simple
filename: warning.log
maxBytes: 10485760 # 10MB
backupCount: 3
encoding: utf8
error_file_handler:
class: logging.handlers.RotatingFileHandler
level: ERROR
formatter: simple
filename: error.log
maxBytes: 10485760 # 10MB
backupCount: 3
encoding: utf8
loggers:
my_module:
level: ERROR
handlers: [console]
propagate: no
root:
level: INFO
handlers: [console, info_file_handler, error_file_handler, warning_file_handler]

@ -0,0 +1,16 @@
import logging.config
import os
import yaml
def setup_logging(path="./src/logging.yaml", default_level=logging.INFO):
"""
Setup logging configuration
"""
if os.path.exists(path):
with open(path, "rt") as f:
config = yaml.safe_load(f.read())
logging.config.dictConfig(config)
else:
logging.basicConfig(level=default_level)
return logging.getLogger(__name__)
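A minimal usage sketch (message illustrative): each script calls the helper once, which loads the YAML config above, and then logs through the returned logger so output goes to the console and the rotating info/warning/error log files.

    from utils import setup_logging

    logger = setup_logging()
    logger.info("Starting processing")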