###############################
# Load environment variables
include .env
export $(shell sed 's/=.*//' .env)
CURRENT_DIR = $(shell pwd)
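# .env is expected to define the variables referenced below (e.g. DATA_BACKUP_DIR,
# MATLAB_PATH). Illustrative example only; these paths are assumptions, not the
# project's actual values:
#   DATA_BACKUP_DIR=/mnt/network-drive/project-backup
#   MATLAB_PATH=/usr/local/MATLAB/R2018b/bin/matlab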
###############################
# Create python virtual environment
.PHONY: venv-init
venv-init: ##@environment Setup virtual environment
	pip install pipenv
	pipenv --python 3.7
	pipenv install
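# Once the environment exists, individual commands run inside it with
# `pipenv run <command>` (as the rules below do), or interactively via `pipenv shell`.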
###############################
# Get data from network drive
push-data: ##@data Copies data from ./data/ to data backup directory
	rclone copy ./data/ $(DATA_BACKUP_DIR) --exclude "*.las" --progress

pull-data: ##@data Copies data from data backup directory to ./data/
# We probably don't want to pull the raw LIDAR .las files, so let's exclude them
	rclone copy $(DATA_BACKUP_DIR) ./data/ --exclude "*.las" --progress
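# Note: rclone copy skips files already identical at the destination,
# --exclude "*.las" leaves the large raw LIDAR files out of both transfers, and
# --progress prints transfer status. DATA_BACKUP_DIR may be a plain path or an
# rclone remote such as "remote:backup/" (illustrative value only).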
###############################
# Process data
.PHONY: process-mat
process-mat: ./data/interim/sites.csv ./data/interim/waves.csv ./data/interim/profiles.csv ./data/interim/tides.csv ##@data Process all .mat to .csv
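# make only re-runs a rule when its prerequisites are newer than its target, so
# repeated `make process-mat` calls rebuild nothing; `make -B process-mat` forces
# a full rebuild.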
# Calculates beach orientations at each profile
./data/raw/processed_shorelines/orientations.mat: ./data/raw/processed_shorelines/profiles.mat
	$(MATLAB_PATH) -nosplash -r "cd $(CURRENT_DIR); run('./src/data/beach_orientations.m'); quit"
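# The rule above runs MATLAB headlessly: -nosplash suppresses the splash screen
# and -r executes the quoted statements (cd into the repo, run the script, quit).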
# Produces a .csv of sites where our beach cross-sections are located
./data/interim/sites.csv: ./data/raw/processed_shorelines/*.mat
	pipenv run python ./src/data/parse_mat.py create-sites-csv \
		--waves-mat "./data/raw/processed_shorelines/waves.mat" \
		--tides-mat "./data/raw/processed_shorelines/tides.mat" \
		--profiles-mat "./data/raw/processed_shorelines/profiles.mat" \
		--orientations-mat "./data/raw/processed_shorelines/orientations.mat" \
		--output-file "./data/interim/sites.csv"
# Produces a .csv of waves for each site
./data/interim/waves.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/waves.mat
	pipenv run python ./src/data/parse_mat.py create-waves-csv \
		--waves-mat "./data/raw/processed_shorelines/waves.mat" \
		--sites-csv "./data/interim/sites.csv" \
		--output-file "./data/interim/waves.csv"
# Produces a .csv of profiles for each site
./data/interim/profiles.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/profiles.mat
	pipenv run python ./src/data/parse_mat.py create-profiles-csv \
		--profiles-mat "./data/raw/processed_shorelines/profiles.mat" \
		--sites-csv "./data/interim/sites.csv" \
		--output-file "./data/interim/profiles.csv"
# Produces a .csv of tides for each site
./data/interim/tides.csv: ./data/interim/sites.csv ./data/raw/processed_shorelines/tides.mat
	pipenv run python ./src/data/parse_mat.py create-tides-csv \
		--tides-mat "./data/raw/processed_shorelines/tides.mat" \
		--sites-csv "./data/interim/sites.csv" \
		--output-file "./data/interim/tides.csv"
# Creates a .shp of our sites to load into QGIS
./data/interim/sites.shp: ./data/interim/sites.csv
	pipenv run python ./src/data/csv_to_shp.py sites-csv-to-shp \
		--input-csv "./data/interim/sites.csv" \
		--output-shp "./data/interim/sites.shp"
# Creates a .csv of our dune toe and crest profile features
./data/interim/profile_features.csv: ./data/raw/profile_features/dune_crests.shp ./data/raw/profile_features/dune_toes.shp ./data/interim/sites.csv ./data/interim/profiles.csv
	pipenv run python ./src/data/profile_features.py create-profile-features \
		--dune-crest-shp "./data/raw/profile_features/dune_crests.shp" \
		--dune-toe-shp "./data/raw/profile_features/dune_toes.shp" \
		--sites-csv "./data/interim/sites.csv" \
		--profiles-csv "./data/interim/profiles.csv" \
		--output-csv "./data/interim/profile_features.csv"
# Creates a forecast of TWL using sto06 and the time-varying prestorm foreshore slope
./data/interim/twl_foreshore_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
	pipenv run python ./src/analysis/forecast_twl.py create-twl-forecast \
		--waves-csv "./data/interim/waves.csv" \
		--tides-csv "./data/interim/tides.csv" \
		--profiles-csv "./data/interim/profiles.csv" \
		--profile-features-csv "./data/interim/profile_features.csv" \
		--runup-function "sto06" \
		--slope "foreshore" \
		--output-file "./data/interim/twl_foreshore_slope_sto06.csv"
# Creates a forecast of TWL using sto06 and the prestorm mean foreshore slope
./data/interim/twl_mean_slope_sto06.csv: ./data/interim/waves.csv ./data/interim/tides.csv ./data/interim/profiles.csv ./data/interim/sites.csv ./data/interim/profile_features.csv
	pipenv run python ./src/analysis/forecast_twl.py create-twl-forecast \
		--waves-csv "./data/interim/waves.csv" \
		--tides-csv "./data/interim/tides.csv" \
		--profiles-csv "./data/interim/profiles.csv" \
		--profile-features-csv "./data/interim/profile_features.csv" \
		--runup-function "sto06" \
		--slope "mean" \
		--output-file "./data/interim/twl_mean_slope_sto06.csv"
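# The output filenames encode the slope option ("mean" vs "foreshore") and the
# runup function ("sto06", presumably the Stockdon et al. 2006 parameterisation).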
# Creates a .csv of observed storm impacts from the profiles and profile features
./data/interim/impacts_observed.csv: ./data/interim/profiles.csv ./data/interim/profile_features.csv
	pipenv run python ./src/analysis/observed_storm_impacts.py create-observed-impacts \
		--profiles-csv "./data/interim/profiles.csv" \
		--profile-features-csv "./data/interim/profile_features.csv" \
		--output-file "./data/interim/impacts_observed.csv"
# Creates a .csv of forecasted storm impacts based on the mean-slope TWL forecast
./data/interim/impacts_forecasted_mean_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_mean_slope_sto06.csv
	pipenv run python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
		--profile-features-csv "./data/interim/profile_features.csv" \
		--forecasted-twl-csv "./data/interim/twl_mean_slope_sto06.csv" \
		--output-file "./data/interim/impacts_forecasted_mean_slope_sto06.csv"
# Creates a .csv of forecasted storm impacts based on the foreshore-slope TWL forecast
./data/interim/impacts_forecasted_foreshore_slope_sto06.csv: ./data/interim/profile_features.csv ./data/interim/twl_foreshore_slope_sto06.csv
	pipenv run python ./src/analysis/forecasted_storm_impacts.py create-forecasted-impacts \
		--profile-features-csv "./data/interim/profile_features.csv" \
		--forecasted-twl-csv "./data/interim/twl_foreshore_slope_sto06.csv" \
		--output-file "./data/interim/impacts_forecasted_foreshore_slope_sto06.csv"
#################################################################################
# PROJECT RULES #
#################################################################################
.PHONY: push-data pull-data mat-to-csv sites-csv-to-shp format
mat-to-csv: ##@data Converts raw .mat files to .csv for python
	cd ./src/data/ && python parse_mat.py
sites-csv-to-shp: ##@data Create the sites.shp from sites.csv
	cd ./src/data && python csv_to_shp.py sites_csv_to_shp "..\..\data\interim\sites.csv" "..\..\data\interim\sites.shp"
###############################
# Misc commands
format: ./src/*.py ##@misc Format python files with black
	pipenv run black --line-length 120 "src/"
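# To verify formatting without rewriting files, black also accepts --check, e.g.
# `pipenv run black --check --line-length 120 "src/"`.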
###############################
# Help command
.DEFAULT_GOAL := help
.PHONY: help
# Refer to https://gist.github.com/prwhite/8168133
#COLORS
GREEN := $(shell tput -Txterm setaf 2)
WHITE := $(shell tput -Txterm setaf 7)
YELLOW := $(shell tput -Txterm setaf 3)
RESET := $(shell tput -Txterm sgr0)
# Add the following 'help' target to your Makefile
# And add help text after each target name starting with '\#\#'
# A category can be added with @category
HELP_FUN = \
    %help; \
    while(<>) { push @{$$help{$$2 // 'options'}}, [$$1, $$3] if /^([a-zA-Z\-]+)\s*:.*\#\#(?:@([a-zA-Z\-]+))?\s(.*)$$/ }; \
    print "usage: make [target]\n\n"; \
    for (sort keys %help) { \
        print "${WHITE}$$_:${RESET}\n"; \
        for (@{$$help{$$_}}) { \
            $$sep = " " x (32 - length $$_->[0]); \
            print " ${YELLOW}$$_->[0]${RESET}$$sep${GREEN}$$_->[1]${RESET}\n"; \
        }; \
        print "\n"; }
help: ##@other Show this help.
	@perl -e '$(HELP_FUN)' $(MAKEFILE_LIST)
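# `make help` output looks roughly like this (abridged):
#   usage: make [target]
#
#   data:
#     process-mat                     Process all .mat to .csv
#     pull-data                       Copies data from data backup directory to ./data/
#     push-data                       Copies data from ./data/ to data backup directory
#   environment:
#     venv-init                       Setup virtual environment
#   other:
#     help                            Show this help.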