# python 3.5
# Requires LAStools to be installed (with the appropriate license). Note that
# LAStools requires no spaces in file names.
# 2017088_las_manipulation should have been run beforehand, so that a las file
# exists with the buildings and vegetation removed.
# Note that the nielsen_volumes script must be in the same folder.
# This script will:
#   - crop to a given polygon (crop away the swash zone)
#   - extract values along a predefined profile
#   - do the volume analysis
#   - export pngs of the surveys
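# Example invocation (file names here are illustrative only):
#   python las_outputs.py survey_params.xlsx
# The parameter spreadsheet must contain a sheet named "PARAMS" with the
# columns read in main() below ('Beach', 'SURVEY DATE', 'INPUT LAS',
# 'LAS CLASSIFIED FOLDER', 'SHP SWASH FOLDER', ...).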
########################### IMPORTS ###########################################
import os
import io
import re
import sys
import math
import argparse
import datetime
import subprocess
import numpy as np
import pandas as pd
from cycler import cycler
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator
import nielsen_volumes
from survey_tools import call_lastools, extract_pts, update_survey_output

def get_datestring(x):
    """Format a date integer into an ISO-8601 date string

    Args:
        x: integer representation of date string

    Returns:
        formatted date string

    Examples:
        >>> get_datestring(19700101)
        '1970-01-01'
    """
    datestr = datetime.datetime.strptime(str(x), '%Y%m%d').strftime('%Y-%m-%d')

    return datestr

def remove_temp_files(directory):
    for f in os.listdir(directory):
        os.unlink(os.path.join(directory, f))

    return None

def plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits, delta_vol):
    csv_name = profile_name + '.csv'
    profiles = pd.read_csv(os.path.join(csv_output_dir, csv_name))

    # Remove metadata, and extract profile coordinates
    profiles = profiles.loc[:, 'Chainage':].set_index('Chainage')

    # Find landward limit of profile (behind beach)
    ch_min = ch_limits.loc[profile_name, 'Landward Limit']

    # Set figure dimensions based on beach size
    vertical_exag = 8
    m_per_inch = 8
    fig_h = profiles.dropna().values.max() / m_per_inch * vertical_exag
    fig_w = (profiles.index.max() - ch_min) / m_per_inch

    fig, ax = plt.subplots(figsize=(fig_w, fig_h))

    for col in profiles.columns:
        profile = profiles.loc[ch_min:, col]
        date_str = col.split('_')[-1]
        date = get_datestring(date_str)
        ax.plot(profile.index, profile, label=date)

    ax.set_aspect(vertical_exag)
    ax.set_xlabel('Chainage (m)', labelpad=10)
    ax.set_ylabel('Elevation (m AHD)', labelpad=10)

    # Show most recent volume change
    ax.annotate('Most recent\nvolume change:\n{:0.1f} m$^3$/m'.format(delta_vol),
                (0.05, 0.15), xycoords='axes fraction', fontsize=9,
                backgroundcolor='#ffffff', linespacing=1.5)

    ax.legend(edgecolor='none', facecolor='#ffffff', fontsize=9)

    ax.xaxis.set_minor_locator(MultipleLocator(5))
    ax.yaxis.set_minor_locator(MultipleLocator(0.5))
    ax.xaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)
    ax.yaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)

    png_name = os.path.join(graph_loc, profile_name + '.png')
    plt.savefig(png_name, bbox_inches='tight', dpi=300)
    plt.close()

def calculate_volumes(profile_name, survey_date, csv_output_dir, ch_limits, volume_output_dir):
    csv_prof = profile_name + '.csv'
    beach = re.search(r'.*(?=_\d)', profile_name).group()
    profiles = pd.read_csv(os.path.join(csv_output_dir, csv_prof))

    # Remove metadata, and extract profile coordinates
    profiles = profiles.loc[:, 'Chainage':].set_index('Chainage')

    # Find landward limit of profile (behind beach)
    ch_min = ch_limits.loc[profile_name, 'Landward Limit']

    # Open volume spreadsheet (create a new one if it does not exist yet)
    csv_vol = os.path.join(volume_output_dir, 'volumes.csv')
    try:
        volumes = pd.read_csv(csv_vol, index_col=0)
    except FileNotFoundError:
        volumes = pd.DataFrame()

    # Format dates
    date = get_datestring(survey_date)

    for current_date in profiles.columns:
        # Get Nielsen erosion volumes
        chainage = profiles.loc[:, current_date].dropna().index
        elevation = profiles.loc[:, current_date].dropna().values
        volume = nielsen_volumes.volume_available(chainage, elevation, ch_min)

        # Update spreadsheet
        volumes.loc[profile_name, date] = volume

    # Save updated volumes spreadsheet
    volumes = volumes[volumes.columns.sort_values()]
    volumes = volumes.sort_index()
    volumes.to_csv(csv_vol)

    # Get most recent volume difference for current profile
    previous_vol = volumes.loc[profile_name].values[-2]
    current_vol = volumes.loc[profile_name].values[-1]
    delta_vol = current_vol - previous_vol

    return delta_vol
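
# Note on outputs (illustrative sketch; profile names and values are made up):
# calculate_volumes() maintains volumes.csv indexed by profile name, with one
# column per survey date, e.g.
#
#                2017-01-15  2017-06-20
#   EXAMPLE_1         352.1       340.7
#   EXAMPLE_2         298.4       305.2
#
# delta_vol is the difference between the two most recent date columns for the
# current profile, and is annotated on the figure by plot_profiles().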

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'input_file',
        metavar='PARAMS_FILE',
        help='name of parameter file',
        default=None)

    # Print usage if no arguments are provided
    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    args = parser.parse_args()

    # Read the parameters file and loop through its rows
    input_file = args.input_file
    params_file = pd.read_excel(input_file, sheet_name="PARAMS")

    for i, row in params_file.iterrows():
        print("Starting to process %s" % row['Beach'])
        beach = row['Beach']
        survey_date = row['SURVEY DATE']
        original_las = row['INPUT LAS']
        classified_las_dir = row['LAS CLASSIFIED FOLDER']
        shp_swash_dir = row['SHP SWASH FOLDER']
        crop_heatmap_poly = row['HEATMAP CROP POLY']
        output_las_dir = row['LAS OUTPUT FOLDER']
        zone_MGA = row['ZONE MGA']
        output_poly_dir = row['SHP RASTER FOLDER']
        output_tif_dir = row['TIF OUTPUT FOLDER']
        cp_csv = row['INPUT CSV']
        profile_limit_file = row['PROFILE LIMIT FILE']
        csv_output_dir = row['CSV OUTPUT FOLDER']
        graph_loc = row['PNG OUTPUT FOLDER']
        volume_output_dir = row['CSV VOLUMES FOLDER']
        tmp_dir = row['TMP FOLDER']

        # Get base name of input las
        las_basename = os.path.splitext(os.path.basename(original_las))[0]

        # Get name of input point cloud
        input_las = os.path.join(classified_las_dir, las_basename + '.las')

        # Get name of swash cropping polygon
        crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')

        # Crop point cloud to swash boundary
        print('Cropping swash...')
        las_data = call_lastools('lasclip', input=input_las, output='-stdout',
                                 args=['-poly', crop_swash_poly], verbose=False)

        # Apply sea-side clipping polygon
        print('Cropping back of beach...')
        las_data = call_lastools('lasclip', input=las_data, output='-stdout',
                                 args=['-poly', crop_heatmap_poly], verbose=False)

        # Create clipping polygon for heatmap raster
        print('Creating heat map cropping polygon...')
        shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
        call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)

        # Make a raster from point cloud
        print('Creating heat map raster...')
        tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
        call_lastools('blast2dem', input=las_data, output=tif_name,
                      args=['-step', 0.2], verbose=False)

        # Extract elevations along profiles from triangulated surface
        print('Extracting profile elevations...')
        df = extract_pts(
            las_data,
            cp_csv,
            survey_date,
            beach,
            args=['-parse', 'sxyz', '-keep_class', '2'],
            verbose=False)

        # Update survey profiles
        update_survey_output(df, csv_output_dir)

        # Get landward limit of surveys
        ch_limits = pd.read_excel(profile_limit_file, index_col='Profile')

        # Plot profiles, and save sand volumes for current beach
        print('Updating figures...')
        profile_names = df['Profile'].unique()
        for profile_name in profile_names:
            delta_vol = calculate_volumes(profile_name, survey_date,
                                          csv_output_dir, ch_limits,
                                          volume_output_dir)
            plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits,
                          delta_vol)

        # Remove temporary files
        remove_temp_files(tmp_dir)


if __name__ == '__main__':
    main()