Compare commits

...

6 Commits

@ -167,6 +167,32 @@ impacts: ./data/interim/impacts_forecasted_foreshore_slope_sto06.csv ./data/inte
# --output-file "./data/interim/impacts_forecasted_poststorm_mean_slope_sto06.csv" # --output-file "./data/interim/impacts_forecasted_poststorm_mean_slope_sto06.csv"
./data/interim/impacts_forecasted_mean_slope_sto06.geojson: ./data/interim/impacts_forecasted_mean_slope_sto06.csv
activate ./.venv && python ./src/cli.py impacts-to-geojson \
--sites-csv "./data/interim/sites.csv" \
--observed-impacts-csv "./data/interim/impacts_observed.csv" \
--forecast-impacts-csv "./data/interim/impacts_forecasted_mean_slope_sto06.csv" \
--output-geojson "./data/interim/impacts_forecasted_mean_slope_sto06.geojson"
./data/interim/impacts_forecasted_mean_slope_sto06_R_high.geojson: ./data/interim/impacts_forecasted_mean_slope_sto06.csv
activate ./.venv && python ./src/cli.py R-high-to-geojson \
--sites-csv "./data/interim/sites.csv" \
--profile-csv "./data/interim/profiles.csv" \
--impacts-csv "./data/interim/impacts_forecasted_mean_slope_sto06.csv" \
--output-geojson "./data/interim/impacts_forecasted_mean_slope_sto06_R_high.geojson"
./data/interim/profile_features.geojson: ./data/interim/profile_features.csv
activate ./.venv && python ./src/cli.py profile-features-to-geojson \
--sites-csv "./data/interim/sites.csv" \
--profile-features-csv "./data/interim/profile_features.csv" \
--output-geojson "./data/interim/profile_features.geojson"
./data/interim/sites.geojson: ./data/interim/sites.csv
activate ./.venv && python ./src/cli.py sites-csv-to-geojson \
--input-csv "./data/interim/sites.csv" \
--output-geojson "./data/interim/sites.geojson"
############################### ###############################
# Misc commands # Misc commands
format: ./src/*.py ##@misc Check python file formatting format: ./src/*.py ##@misc Check python file formatting

@ -6,7 +6,7 @@ import os
import pandas as pd import pandas as pd
from utils import setup_logging from logs import setup_logging
logger = setup_logging() logger = setup_logging()

@ -3,12 +3,12 @@ from multiprocessing import Pool
import click import click
import numpy as np import numpy as np
import numpy.ma as ma
import pandas as pd import pandas as pd
from scipy import stats from scipy import stats
from analysis import runup_models from analysis import runup_models
from utils import setup_logging from utils import crossings
from logs import setup_logging
logger = setup_logging() logger = setup_logging()
@ -270,35 +270,6 @@ def slope_from_profile(profile_x, profile_z, top_elevation, btm_elevation, metho
return -slope return -slope
def crossings(profile_x, profile_z, constant_z):
    """
    Finds the x coordinate of a z elevation for a beach profile. Much faster than using shapely to calculate
    intersections since we are only interested in intersections of a constant value. Will return multiple
    intersections if found. Used in calculating beach slope.
    Adapted from https://stackoverflow.com/a/34745789
    :param profile_x: List of x coordinates for the beach profile section
    :param profile_z: List of z coordinates for the beach profile section
    :param constant_z: Float of the elevation to find corresponding x coordinates
    :return: List of x coordinates which correspond to the constant_z
    """
    # Drop nan elevations up front so the sign-change test below doesn't emit warnings
    keep = ~ma.masked_invalid(profile_z).mask
    zs = np.array(profile_z)[keep]
    xs = np.array(profile_x)[keep]

    # Shift the profile so the target elevation sits at zero
    shifted = np.subtract(zs, constant_z)

    # A sign change between consecutive samples marks a crossing
    # TODO Sometimes this can give a runtime warning https://stackoverflow.com/a/36489085
    cross_idx = np.where(shifted[:-1] * shifted[1:] < 0)[0]

    # Linearly interpolate between the two samples bracketing each crossing
    return [
        xs[i] - (xs[i] - xs[i + 1]) / (shifted[i] - shifted[i + 1]) * shifted[i]
        for i in cross_idx
    ]
@click.command() @click.command()
@click.option("--waves-csv", required=True, help="") @click.option("--waves-csv", required=True, help="")
@click.option("--tides-csv", required=True, help="") @click.option("--tides-csv", required=True, help="")

@ -5,7 +5,7 @@ Estimates the forecasted storm impacts based on the forecasted water level and d
import click import click
import pandas as pd import pandas as pd
from utils import setup_logging from logs import setup_logging
logger = setup_logging() logger = setup_logging()

@ -3,7 +3,7 @@ import numpy as np
import pandas as pd import pandas as pd
from scipy.integrate import simps from scipy.integrate import simps
from utils import setup_logging from logs import setup_logging
logger = setup_logging() logger = setup_logging()

@ -2,18 +2,17 @@
Entry point to run data processing and analysis commands. Entry point to run data processing and analysis commands.
""" """
# Disable numpy warnings
import warnings
import click import click
import data.parse_mat as parse_mat
import data.parse_shp as profile_features
import data.csv_to_shp as csv_to_shp
import analysis.forecast_twl as forecast_twl import analysis.forecast_twl as forecast_twl
import analysis.forecasted_storm_impacts as forecasted_storm_impacts import analysis.forecasted_storm_impacts as forecasted_storm_impacts
import analysis.observed_storm_impacts as observed_storm_impacts import analysis.observed_storm_impacts as observed_storm_impacts
import data.apply_manual_overwrites as apply_manual_overwrites import data.apply_manual_overwrites as apply_manual_overwrites
import data.csv_to_geojson as csv_to_geojson
# Disable numpy warnings import data.parse_mat as parse_mat
import warnings
warnings.simplefilter(action="ignore", category=FutureWarning) warnings.simplefilter(action="ignore", category=FutureWarning)
@ -24,14 +23,16 @@ def cli():
if __name__ == "__main__": if __name__ == "__main__":
cli.add_command(parse_mat.create_waves_csv) cli.add_command(apply_manual_overwrites.apply_profile_features_overwrite)
cli.add_command(parse_mat.create_sites_and_profiles_csv) cli.add_command(csv_to_geojson.impacts_to_geojson)
cli.add_command(parse_mat.create_tides_csv) cli.add_command(csv_to_geojson.profile_features_to_geojson)
cli.add_command(parse_mat.create_profile_features) cli.add_command(csv_to_geojson.R_high_to_geojson)
# cli.add_command(profile_features.create_profile_features) cli.add_command(csv_to_geojson.sites_csv_to_geojson)
cli.add_command(csv_to_shp.sites_csv_to_geojson)
cli.add_command(forecast_twl.create_twl_forecast) cli.add_command(forecast_twl.create_twl_forecast)
cli.add_command(forecasted_storm_impacts.create_forecasted_impacts) cli.add_command(forecasted_storm_impacts.create_forecasted_impacts)
cli.add_command(observed_storm_impacts.create_observed_impacts) cli.add_command(observed_storm_impacts.create_observed_impacts)
cli.add_command(apply_manual_overwrites.apply_profile_features_overwrite) cli.add_command(parse_mat.create_profile_features)
cli.add_command(parse_mat.create_sites_and_profiles_csv)
cli.add_command(parse_mat.create_tides_csv)
cli.add_command(parse_mat.create_waves_csv)
cli() cli()

@ -5,7 +5,7 @@ After generating interim data files based on raw data, we may need to overwrite
import pandas as pd import pandas as pd
import numpy as np import numpy as np
import click import click
from utils import setup_logging from logs import setup_logging
logger = setup_logging() logger = setup_logging()

@ -0,0 +1,298 @@
"""
Converts .csv files to .shape files
"""
import os
import numpy.ma as ma
import click
import fiona
import numpy as np
import pandas as pd
from fiona.crs import from_epsg
from shapely.geometry import Point, mapping, LineString
from collections import OrderedDict
from utils import crossings, convert_coord_systems
from logs import setup_logging
logger = setup_logging()
def lat_lon_from_profile_x_coord(center_lat_lon, orientation, center_profile_x, x_coord):
    """
    Returns the lat/lon of a point on a profile with the given x_coord
    :param center_lat_lon: Shapely point of lat/lon of profile center
    :param orientation: Orientation of the profile (positive east, counterclockwise)
    :param center_profile_x: x value of the center of the profile
    :param x_coord: X coordinate of the point to get a lat lon from
    :return: Shapely point of the location in lat/lon (EPSG:4326)
    """
    # Work in the projected coordinate system so the offset is in metres
    center_projected = convert_coord_systems(center_lat_lon)
    easting, northing = center_projected.xy

    # Resolve the along-profile offset into easting/northing components
    offset = center_profile_x - x_coord
    angle = np.deg2rad(orientation)
    projected_point = Point(easting + offset * np.cos(angle), northing + offset * np.sin(angle))

    # Convert back to lat/lon for geojson output
    return convert_coord_systems(projected_point, in_coord_system="EPSG:28356", out_coord_system="EPSG:4326")
@click.command()
@click.option("--sites-csv", required=True, help=".csv file to convert")
@click.option("--profile-csv", required=True, help=".csv file to convert")
@click.option("--impacts-csv", required=True, help=".csv file to convert")
@click.option("--output-geojson", required=True, help="where to store .geojson file")
def R_high_to_geojson(sites_csv, profile_csv, impacts_csv, output_geojson):
    """
    Converts impact R_high into a lat/lon geojson that we can plot in QGIS

    :param sites_csv: .csv file of sites with lat/lon, orientation and profile center
    :param profile_csv: .csv file of beach profile x/z coordinates
    :param impacts_csv: .csv file of forecasted impacts containing an R_high column
    :param output_geojson: where to store the .geojson file
    :return:
    """
    df_sites = pd.read_csv(sites_csv, index_col=[0])
    df_profiles = pd.read_csv(profile_csv, index_col=[0, 1, 2])
    df_impacts = pd.read_csv(impacts_csv, index_col=[0])

    # Create geojson file
    schema = {
        "geometry": "Point",
        "properties": OrderedDict([("beach", "str"), ("site_id", "str"), ("elevation", "float")]),
    }

    with fiona.open(output_geojson, "w", driver="GeoJSON", crs=from_epsg(4326), schema=schema) as output:
        for index, row in df_impacts.iterrows():
            site_id = index
            # Beach name is the site_id without its trailing 4 characters
            beach = index[:-4]

            # Find lat/lon of R_high position
            R_high_z = row["R_high"]

            # Get prestorm profile to intersect the R_high elevation with
            # (or should this be the poststorm profile? -- TODO confirm)
            df_profile = df_profiles.query('site_id=="{}" & profile_type=="prestorm"'.format(index))
            int_x = crossings(df_profile.index.get_level_values("x").tolist(), df_profile.z.tolist(), R_high_z)

            # Take most landward intersection. Continue to next site if there is no intersection
            try:
                int_x = max(int_x)
            except ValueError:
                continue

            # Get lat/lon on intercept position
            site = df_sites.loc[site_id]
            point_lat_lon = lat_lon_from_profile_x_coord(
                center_lat_lon=Point(site["lon"], site["lat"]),
                orientation=site["orientation"],
                center_profile_x=site["profile_x_lat_lon"],
                x_coord=int_x,
            )

            prop = OrderedDict([("beach", beach), ("site_id", site_id), ("elevation", R_high_z)])
            output.write({"geometry": mapping(point_lat_lon), "properties": prop})
@click.command()
@click.option("--sites-csv", required=True, help=".csv file to convert")
@click.option("--profile-features-csv", required=True, help=".csv file to convert")
@click.option("--output-geojson", required=True, help="where to store .geojson file")
def profile_features_to_geojson(sites_csv, profile_features_csv, output_geojson):
    """
    Converts profile_features containing dune toes and crest locations to a geojson we can load into QGIS

    :param sites_csv: .csv file of sites with lat/lon, orientation and profile center
    :param profile_features_csv: .csv file of dune crest/toe x positions and elevations
    :param output_geojson: where to store the .geojson file
    :return:
    """
    logger.info("Creating profile features geojson")

    # Read files from interim folder
    df_sites = pd.read_csv(sites_csv, index_col=[0])
    df_profile_features = pd.read_csv(profile_features_csv, index_col=[0])

    # Each output feature is a single dune crest or dune toe point
    feature_fields = [
        ("beach", "str"),
        ("site_id", "str"),
        # prestorm_dune_toe, prestorm_dune_crest, poststorm_dune_toe, poststorm_dune_crest
        ("point_type", "str"),
        ("profile_type", "str"),
        ("elevation", "float"),
    ]
    schema = {"geometry": "Point", "properties": OrderedDict(feature_fields)}

    with fiona.open(output_geojson, "w", driver="GeoJSON", crs=from_epsg(4326), schema=schema) as output:
        for site_id, feature in df_profile_features.iterrows():
            beach = site_id[:-4]
            profile_type = feature["profile_type"]
            site = df_sites.loc[site_id]

            for point_type in ["crest", "toe"]:
                elevation = feature["dune_{}_z".format(point_type)]
                x = feature["dune_{}_x".format(point_type)]

                # Skip points which have no x position
                if np.isnan(x):
                    continue

                # Geojsons need to use 'null' instead of 'nan'
                if np.isnan(elevation):
                    elevation = None

                # Convert x position to lat/lon
                point_lat_lon = lat_lon_from_profile_x_coord(
                    center_lat_lon=Point(site["lon"], site["lat"]),
                    orientation=site["orientation"],
                    center_profile_x=site["profile_x_lat_lon"],
                    x_coord=x,
                )

                props = OrderedDict(
                    [
                        ("beach", beach),
                        ("site_id", site_id),
                        ("point_type", point_type),
                        ("profile_type", profile_type),
                        ("elevation", elevation),
                    ]
                )
                output.write({"geometry": mapping(point_lat_lon), "properties": props})
@click.command()
@click.option("--input-csv", required=True, help=".csv file to convert")
@click.option("--output-geojson", required=True, help="where to store .geojson file")
def sites_csv_to_geojson(input_csv, output_geojson):
    """
    Converts our dataframe of sites to .geojson to load in QGis. Sites are loaded as linestrings of the profile
    cross-sections

    :param input_csv: sites .csv file with lat/lon, orientation and profile center
    :param output_geojson: where to store the .geojson file
    :return:
    """
    logger.info("Converting %s to %s", input_csv, output_geojson)
    df_sites = pd.read_csv(input_csv, index_col=[0])
    logger.info(os.environ.get("GDAL_DATA", None))

    schema = {"geometry": "LineString", "properties": OrderedDict([("beach", "str"), ("site_id", "str")])}
    with fiona.open(output_geojson, "w", driver="GeoJSON", crs=from_epsg(4326), schema=schema) as output:
        for site_id, site in df_sites.iterrows():
            profile_center = Point(site["lon"], site["lat"])

            # Landward (x=0) and seaward (x=2*center) limits of the profile cross-section
            land_end, sea_end = [
                lat_lon_from_profile_x_coord(
                    center_lat_lon=profile_center,
                    orientation=site["orientation"],
                    center_profile_x=site["profile_x_lat_lon"],
                    x_coord=chainage,
                )
                for chainage in (0, 2 * site["profile_x_lat_lon"])
            ]

            cross_section = LineString([land_end, profile_center, sea_end])
            props = OrderedDict([("beach", site["beach"]), ("site_id", site_id)])
            output.write({"geometry": mapping(cross_section), "properties": props})
    logger.info("Done!")
@click.command()
@click.option("--sites-csv", required=True, help="sites.csv file to convert")
@click.option("--observed-impacts-csv", required=True, help="impacts-observed.csv file to convert")
@click.option("--forecast-impacts-csv", required=True, help="impacts-forecast.csv file to convert")
@click.option("--output-geojson", required=True, help="where to store .geojson file")
def impacts_to_geojson(sites_csv, observed_impacts_csv, forecast_impacts_csv, output_geojson):
    """
    Converts impacts observed and forecasted to a geojson for visualization in QGIS

    :param sites_csv: sites .csv file with lat/lon, orientation and profile center
    :param observed_impacts_csv: .csv file of observed impacts with a storm_regime column
    :param forecast_impacts_csv: .csv file of forecasted impacts with a storm_regime column
    :param output_geojson: where to store the .geojson file
    :return:
    """
    # Get information from .csv and read into pandas dataframe
    df_sites = pd.read_csv(sites_csv, index_col=[0])
    df_observed = pd.read_csv(observed_impacts_csv, index_col=[0])
    df_forecast = pd.read_csv(forecast_impacts_csv, index_col=[0])

    # Rename columns, so we can distinguish between forecast and observed
    # (must pass columns=, otherwise pandas renames index labels instead)
    df_observed = df_observed.rename(columns={"storm_regime": "observed_storm_regime"})
    df_forecast = df_forecast.rename(columns={"storm_regime": "forecast_storm_regime"})

    # Concat into one big dataframe
    df = pd.concat([df_sites, df_observed, df_forecast], sort=True, axis=1)

    # Make new column for accuracy of forecast, using underpredict/correct/overpredict classes.
    # Observed/Forecasted/Class for each combination; anything not listed stays NaN.
    classes = [
        ("swash", "collision", "overpredict"),
        ("swash", "swash", "correct"),
        ("swash", "overwash", "overpredict"),
        ("collision", "swash", "underpredict"),
        ("collision", "collision", "correct"),
        ("collision", "overwash", "overpredict"),
        ("overwash", "swash", "underpredict"),
        ("overwash", "collision", "underpredict"),
        ("overwash", "overwash", "correct"),
    ]
    for observed, forecast, accuracy in classes:
        df.loc[(df.observed_storm_regime == observed) & (df.forecast_storm_regime == forecast), "forecast_accuracy"] = accuracy

    schema = {
        "geometry": "Point",
        "properties": OrderedDict(
            [
                ("beach", "str"),
                ("site_id", "str"),
                ("forecast_storm_regime", "str"),
                ("observed_storm_regime", "str"),
                ("forecast_accuracy", "str"),
            ]
        ),
    }

    with fiona.open(output_geojson, "w", driver="GeoJSON", crs=from_epsg(4326), schema=schema) as output:
        for index, row in df.iterrows():
            # Locate the marker at the seaward end of the profile to avoid cluttering the coastline.
            # Work out where seaward profile limit is
            sea_lat_lon = lat_lon_from_profile_x_coord(
                center_lat_lon=Point(row["lon"], row["lat"]),
                orientation=row["orientation"],
                center_profile_x=row["profile_x_lat_lon"],
                x_coord=2 * row["profile_x_lat_lon"],
            )

            prop = OrderedDict(
                [
                    ("beach", row["beach"]),
                    ("site_id", index),
                    ("forecast_storm_regime", row["forecast_storm_regime"]),
                    ("observed_storm_regime", row["observed_storm_regime"]),
                    ("forecast_accuracy", row["forecast_accuracy"]),
                ]
            )
            output.write({"geometry": mapping(sea_lat_lon), "properties": prop})
    logger.info("Done!")

@ -1,333 +0,0 @@
"""
Converts .csv files to .shape files
"""
import os
import numpy.ma as ma
import click
import fiona
import numpy as np
import pandas as pd
from fiona.crs import from_epsg
from shapely.geometry import Point, mapping, LineString
from collections import OrderedDict
from data.parse_shp import convert_coord_systems
from utils import setup_logging
logger = setup_logging()
def R_high_to_geojson(sites_csv, profiles_csv, impacts_csv, output_geojson):
sites_csv = './data/interim/sites.csv'
profiles_csv = './data/interim/profiles.csv'
impacts_csv = './data/interim/impacts_forecasted_mean_slope_sto06.csv'
output_geojson = './data/interim/R_high_forecasted_mean_slope_sto06.geojson'
df_sites = pd.read_csv(sites_csv, index_col=[0])
df_profiles = pd.read_csv(profiles_csv, index_col=[0,1,2])
df_impacts = pd.read_csv(impacts_csv, index_col=[0])
# Create geojson file
schema = {
'geometry': 'Point',
'properties': OrderedDict([
('beach', 'str'),
('site_id', 'str'),
('elevation', 'float'),
])
}
with fiona.open(output_geojson, 'w', driver='GeoJSON', crs=from_epsg(4326), schema=schema) as output:
for index, row in df_impacts.iterrows():
site_id = index
beach = index[:-4]
# Find lat/lon of R_high position
R_high_z = row['R_high']
# Get poststorm profile (or should this be prestorm?)
df_profile = df_profiles.query('site_id=="{}" & profile_type=="prestorm"'.format(index))
int_x = crossings(df_profile.index.get_level_values('x').tolist(),
df_profile.z.tolist(),
R_high_z)
# Take most landward interesection. Continue to next site if there is no intersection
try:
int_x = max(int_x)
except:
continue
# Get lat/lon on intercept position
site = df_sites.loc[site_id]
center_profile_x = site["profile_x_lat_lon"]
orientation = site["orientation"]
center_lat_lon = Point(site['lon'], site['lat']) # Get lat/lon of center of profile
center_xy = convert_coord_systems(center_lat_lon)
center_x, center_y = center_xy.xy
# Calculate xy position of point and convert to lat/lon
point_x = center_x + (center_profile_x - int_x) * np.cos(np.deg2rad(orientation))
point_y = center_y + (center_profile_x - int_x) * np.sin(np.deg2rad(orientation))
point_xy = Point(point_x, point_y)
point_lat_lon = convert_coord_systems(point_xy,
in_coord_system="EPSG:28356",
out_coord_system="EPSG:4326")
prop = OrderedDict([
('beach', beach),
('site_id', site_id),
('elevation', R_high_z),
])
output.write({"geometry": mapping(point_lat_lon), "properties": prop})
def crossings(profile_x, profile_z, constant_z):
"""
Finds the x coordinate of a z elevation for a beach profile. Much faster than using shapely to calculate
intersections since we are only interested in intersections of a constant value. Will return multiple
intersections if found. Used in calculating beach slope.
Adapted from https://stackoverflow.com/a/34745789
:param profile_x: List of x coordinates for the beach profile section
:param profile_z: List of z coordinates for the beach profile section
:param constant_z: Float of the elevation to find corresponding x coordinates
:return: List of x coordinates which correspond to the constant_z
"""
# Remove nans to suppress warning messages
valid = ~ma.masked_invalid(profile_z).mask
profile_z = np.array(profile_z)[valid]
profile_x = np.array(profile_x)[valid]
# Normalize the 'signal' to zero.
# Use np.subtract rather than a list comprehension for performance reasons
z = np.subtract(profile_z, constant_z)
# Find all indices right before any crossing.
# TODO Sometimes this can give a runtime warning https://stackoverflow.com/a/36489085
indicies = np.where(z[:-1] * z[1:] < 0)[0]
# Use linear interpolation to find intersample crossings.
return [profile_x[i] - (profile_x[i] - profile_x[i + 1]) / (z[i] - z[i + 1]) * (z[i]) for i in indicies]
@click.command()
@click.option("--sites-csv", required=True, help=".csv file to convert")
@click.option("--profile-features-csv", required=True, help=".csv file to convert")
@click.option("--output-geojson", required=True, help="where to store .geojson file")
def profile_features_to_geojson(sites_csv, profile_features_csv, output_geojson):
"""
Converts profile_features containing dune toes and crest locations to a geojson we can load into QGIS
:param sites_csv:
:param profiles_csv:
:param profile_features_csv:
:param output_geojson:
:return:
"""
logger.info("Creating profile features geojson")
# Read files from interim folder
df_sites = pd.read_csv(sites_csv, index_col=[0])
df_profile_features = pd.read_csv(profile_features_csv, index_col=[0])
# Create geojson file
schema = {
'geometry': 'Point',
'properties': OrderedDict([
('beach', 'str'),
('site_id', 'str'),
('point_type', 'str'), # prestorm_dune_toe, prestorm_dune_crest, poststorm_dune_toe, poststorm_dune_crest
('profile_type', 'str'),
('elevation', 'float'),
])
}
with fiona.open(output_geojson, 'w', driver='GeoJSON', crs=from_epsg(4326), schema=schema) as output:
for index, row in df_profile_features.iterrows():
beach = index[:-4]
site_id = index
profile_type = row['profile_type']
for point_type in ['crest', 'toe']:
# point_type='crest'
elevation = row['dune_{}_z'.format(point_type)]
x = row['dune_{}_x'.format(point_type)]
if np.isnan(x):
continue
# Geojsons need to use 'null' instead of 'nan'
if np.isnan(elevation):
elevation = None
# Convert x position to lat/lon
site = df_sites.loc[site_id]
center_profile_x = site["profile_x_lat_lon"]
orientation = site["orientation"]
center_lat_lon = Point(site['lon'], site['lat']) # Get lat/lon of center of profile
center_xy = convert_coord_systems(center_lat_lon)
center_x, center_y = center_xy.xy
# Calculate xy position of point and convert to lat/lon
point_x = center_x + (center_profile_x - x) * np.cos(np.deg2rad(orientation))
point_y = center_y + (center_profile_x - x) * np.sin(np.deg2rad(orientation))
point_xy = Point(point_x, point_y)
point_lat_lon = convert_coord_systems(point_xy,
in_coord_system="EPSG:28356",
out_coord_system="EPSG:4326")
prop = OrderedDict([
('beach', beach),
('site_id',site_id),
('point_type', point_type),
('profile_type', profile_type),
('elevation', elevation),
])
output.write({"geometry": mapping(point_lat_lon), "properties": prop})
@click.command()
@click.option("--input-csv", required=True, help=".csv file to convert")
@click.option("--output-geojson", required=True, help="where to store .geojson file")
def sites_csv_to_geojson(input_csv, output_geojson):
"""
Converts our dataframe of sites to .geojson to load in QGis. Sites are loaded as linestrings of the profile
cross-sections
:param input_csv:
:param output_geojson:
:return:
"""
logger.info("Converting %s to %s", input_csv, output_geojson)
df_sites = pd.read_csv(input_csv, index_col=[0])
logger.info(os.environ.get("GDAL_DATA", None))
schema = {
'geometry': 'LineString',
'properties': OrderedDict([
('beach','str'),
('site_id', 'str'),
])
}
with fiona.open(output_geojson, 'w', driver='GeoJSON', crs=from_epsg(4326), schema=schema) as output:
for index, row in df_sites.iterrows():
# Work out where center of profile is
orientation = row["orientation"]
center_profile_x = row["profile_x_lat_lon"]
center_lon = row["lon"]
center_lat = row["lat"]
center_lat_lon = Point(center_lon, center_lat)
center_xy = convert_coord_systems(center_lat_lon)
center_x, center_y = center_xy.xy
# Work out where landward profile limit is
land_x = center_x + center_profile_x * np.cos(np.deg2rad(orientation))
land_y = center_y + center_profile_x * np.sin(np.deg2rad(orientation))
land_xy = Point(land_x, land_y)
land_lat_lon = convert_coord_systems(land_xy, in_coord_system="EPSG:28356",
out_coord_system="EPSG:4326")
# Work out where seaward profile limit is
sea_x = center_x - center_profile_x * np.cos(np.deg2rad(orientation))
sea_y = center_y - center_profile_x * np.sin(np.deg2rad(orientation))
sea_xy = Point(sea_x, sea_y)
sea_lat_lon = convert_coord_systems(sea_xy, in_coord_system="EPSG:28356", out_coord_system="EPSG:4326")
line_string = LineString([land_lat_lon, center_lat_lon, sea_lat_lon])
prop = OrderedDict([("beach", row["beach"]),
("site_id", index)])
output.write({"geometry": mapping(line_string), "properties": prop})
logger.info("Done!")
@click.command()
@click.option("--sites-csv", required=True, help="sites.csv file to convert")
@click.option("--observed-impacts-csv", required=True, help="impacts-observed.csv file to convert")
@click.option("--forecast-impacts-csv", required=True, help="impacts-forecast.csv file to convert")
@click.option("--output-geojson", required=True, help="where to store .geojson file")
def impacts_to_geojson(sites_csv, observed_impacts_csv, forecast_impacts_csv, output_geojson):
"""
Converts impacts observed and forecasted to a geojson for visualization in QGIS
:param sites_csv:
:param observed_impacts_csv:
:param forecast_impacts_csv:
:param output_geojson:
:return:
"""
# Get information from .csv and read into pandas dataframe
df_sites = pd.read_csv(sites_csv, index_col=[0])
df_observed = pd.read_csv(observed_impacts_csv, index_col=[0])
df_forecast = pd.read_csv(forecast_impacts_csv, index_col=[0]).rename({'storm_regime': 'forecast_storm_regime'})
# Rename columns, so we can distinguish between forecast and observed
df_observed = df_observed.rename(columns={'storm_regime': 'observed_storm_regime'})
df_forecast = df_forecast.rename(columns={'storm_regime': 'forecast_storm_regime'})
# Concat into one big dataframe
df = pd.concat([df_sites, df_observed, df_forecast], sort=True,axis=1)
# Make new column for accuracy of forecast. Use underpredict/correct/overpredict classes
df.loc[df.observed_storm_regime == df.forecast_storm_regime, 'forecast_accuray'] = 'correct'
# Observed/Forecasted/Class for each combination
classes = [('swash', 'collision', 'overpredict'),
('swash', 'swash', 'correct'),
('swash', 'overwash', 'overpredict'),
('collision', 'swash', 'underpredict'),
('collision', 'collision', 'correct'),
('collision', 'overwash', 'overpredict'),
('overwash', 'swash', 'underpredict'),
('overwash', 'collision', 'underpredict'),
('overwash', 'overwash', 'correct')]
for c in classes:
df.loc[(df.observed_storm_regime == c[0])&(df.forecast_storm_regime == c[1]), 'forecast_accuracy'] = c[2]
schema = {
'geometry': 'Point',
'properties': OrderedDict([
('beach','str'),
('site_id', 'str'),
('forecast_storm_regime', 'str'),
('observed_storm_regime', 'str',),
('forecast_accuracy', 'str')
])
}
# TODO Impact marker location should be at the seaward end of the profile
with fiona.open(output_geojson, 'w', driver='GeoJSON', crs=from_epsg(4326), schema=schema) as output:
for index, row in df.iterrows():
# Locate the marker at the seaward end of the profile to avoid cluttering the coastline.
# Work out where seaward profile limit is
orientation = row["orientation"]
center_profile_x = row["profile_x_lat_lon"]
center_lon = row["lon"]
center_lat = row["lat"]
center_lat_lon = Point(center_lon, center_lat)
center_xy = convert_coord_systems(center_lat_lon)
center_x, center_y = center_xy.xy
sea_x = center_x - center_profile_x * np.cos(np.deg2rad(orientation))
sea_y = center_y - center_profile_x * np.sin(np.deg2rad(orientation))
sea_xy = Point(sea_x, sea_y)
sea_lat_lon = convert_coord_systems(sea_xy, in_coord_system="EPSG:28356", out_coord_system="EPSG:4326")
prop = OrderedDict([
('beach',row["beach"]),
('site_id', index),
('forecast_storm_regime', row["forecast_storm_regime"]),
('observed_storm_regime', row["observed_storm_regime"],),
('forecast_accuracy', row["forecast_accuracy"])
])
output.write({"geometry": mapping(sea_lat_lon), "properties": prop})
logger.info("Done!")

@ -11,8 +11,8 @@ import pandas as pd
from mat4py import loadmat from mat4py import loadmat
from shapely.geometry import Point from shapely.geometry import Point
from data.parse_shp import convert_coord_systems from utils import convert_coord_systems
from utils import setup_logging from logs import setup_logging
logger = setup_logging() logger = setup_logging()

@ -1,38 +1,20 @@
from functools import partial """
This file can probably be removed since we are not reading from shp files any more
"""
import click import click
import fiona import fiona
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import pyproj
from shapely.geometry import LineString, Point from shapely.geometry import LineString, Point
from shapely.geometry import shape from shapely.geometry import shape
from shapely.ops import transform
from utils import setup_logging from logs import setup_logging
from utils import convert_coord_systems
logger = setup_logging() logger = setup_logging()
def convert_coord_systems(g1, in_coord_system="EPSG:4326", out_coord_system="EPSG:28356"):
"""
Converts coordinates from one coordinates system to another. Needed because shapefiles are usually defined in
lat/lon but should be converted to GDA to calculated distances.
https://gis.stackexchange.com/a/127432
:param in_coord_system: Default is lat/lon WGS84
:param out_coord_system: Default is GDA56 for NSW coastline
:return:
"""
project = partial(
pyproj.transform,
pyproj.Proj(init=in_coord_system), # source coordinate system
pyproj.Proj(init=out_coord_system),
) # destination coordinate system
g2 = transform(project, g1) # apply projection
return g2
def shapes_from_shp(shp_file): def shapes_from_shp(shp_file):
""" """
Parses a shape file and returns a list of shapely shapes, ids and properties Parses a shape file and returns a list of shapely shapes, ids and properties

@ -0,0 +1,17 @@
import logging.config
import os
import yaml
def setup_logging(path="./src/logging.yaml", default_level=logging.INFO):
    """
    Setup logging configuration

    :param path: location of the yaml logging configuration file
    :param default_level: logging level used when the config file is missing
    :return: a logger for this module
    """
    if not os.path.exists(path):
        # No config file available, fall back to a basic configuration
        logging.basicConfig(level=default_level)
    else:
        with open(path, "rt") as f:
            logging.config.dictConfig(yaml.safe_load(f.read()))
    return logging.getLogger(__name__)

@ -1,16 +1,55 @@
import logging.config from functools import partial
import os
import yaml
import numpy as np
import pyproj
from numpy import ma as ma
from shapely.ops import transform
def setup_logging(path="./src/logging.yaml", default_level=logging.INFO):
def crossings(profile_x, profile_z, constant_z):
"""
Finds the x coordinate of a z elevation for a beach profile. Much faster than using shapely to calculate
intersections since we are only interested in intersections of a constant value. Will return multiple
intersections if found. Used in calculating beach slope.
Adapted from https://stackoverflow.com/a/34745789
:param profile_x: List of x coordinates for the beach profile section
:param profile_z: List of z coordinates for the beach profile section
:param constant_z: Float of the elevation to find corresponding x coordinates
:return: List of x coordinates which correspond to the constant_z
"""
# Remove nans to suppress warning messages
valid = ~ma.masked_invalid(profile_z).mask
profile_z = np.array(profile_z)[valid]
profile_x = np.array(profile_x)[valid]
# Normalize the 'signal' to zero.
# Use np.subtract rather than a list comprehension for performance reasons
z = np.subtract(profile_z, constant_z)
# Find all indices right before any crossing.
# TODO Sometimes this can give a runtime warning https://stackoverflow.com/a/36489085
indicies = np.where(z[:-1] * z[1:] < 0)[0]
# Use linear interpolation to find intersample crossings.
return [profile_x[i] - (profile_x[i] - profile_x[i + 1]) / (z[i] - z[i + 1]) * (z[i]) for i in indicies]
# TODO Think of a better way to do this than having to manually specify the coordinate systems
def convert_coord_systems(g1, in_coord_system="EPSG:4326", out_coord_system="EPSG:28356"):
""" """
Setup logging configuration Converts coordinates from one coordinates system to another. Needed because shapefiles are usually defined in
lat/lon but should be converted to GDA to calculated distances.
https://gis.stackexchange.com/a/127432
:param in_coord_system: Default is lat/lon WGS84
:param out_coord_system: Default is GDA56 for NSW coastline
:return:
""" """
if os.path.exists(path): project = partial(
with open(path, "rt") as f: pyproj.transform,
config = yaml.safe_load(f.read()) pyproj.Proj(init=in_coord_system), # source coordinate system
logging.config.dictConfig(config) pyproj.Proj(init=out_coord_system),
else: ) # destination coordinate system
logging.basicConfig(level=default_level)
return logging.getLogger(__name__) g2 = transform(project, g1) # apply projection
return g2

Loading…
Cancel
Save