@@ -13,7 +13,9 @@
import os
import io
import re
import sys
import math
import argparse
import datetime
import subprocess
import numpy as np
@@ -91,7 +93,7 @@ def calculate_volumes(profile_name, survey_date, csv_output_dir, ch_limits, volu
    # Get Nielsen erosion volumes
    chainage = profiles.loc[:, current_date].dropna().index
    elevation = profiles.loc[:, current_date].dropna().values
-   volume = neilson_volumes.volume_available(chainage, elevation, ch_min)
+   volume = nielsen_volumes.volume_available(chainage, elevation, ch_min)

    # Update spreadsheet
    volumes.loc[profile_name, date] = volume
@@ -101,74 +103,100 @@ def calculate_volumes(profile_name, survey_date, csv_output_dir, ch_limits, volu
    volumes.to_csv(csv_vol)


input_file = 'Parameter Files/las-manipulation-survey-2.xlsx'
params_file=pd.read_excel(input_file, sheet_name="PARAMS")

for i, row in params_file.iterrows():
    print("Starting to process %s" % row['Beach'])
    beach=row['Beach']
    survey_date = row['SURVEY DATE']
    original_las = row['INPUT LAS']
    classified_las_dir = row['LAS CLASSIFIED FOLDER']
    shp_swash_dir = row['SHP SWASH FOLDER']
    crop_heatmap_poly = row['HEATMAP CROP POLY']
    output_las_dir = row['LAS OUTPUT FOLDER']
    zone_MGA = row['ZONE MGA']
    output_poly_dir = row['SHP RASTER FOLDER']
    output_tif_dir = row['TIF OUTPUT FOLDER']
    cp_csv = row['INPUT CSV']
    profile_limit_file = row['PROFILE LIMIT FILE']
    csv_output_dir = row['CSV OUTPUT FOLDER']
    graph_loc = row['PNG OUTPUT FOLDER']
    volume_output_dir = row['CSV VOLUMES FOLDER']
    tmp_dir = row['TMP FOLDER']

    # Get base name of input las
    las_basename = os.path.splitext(os.path.basename(original_las))[0]

    # Get name of input point cloud
    input_las = os.path.join(classified_las_dir, las_basename + '.las')

    # Get name of swash cropping polygon
    crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')

    # Crop point cloud to swash boundary
    las_data = call_lastools('lasclip', input=input_las, output='-stdout',
                             args=['-poly', crop_swash_poly], verbose=False)

    # Apply sea-side clipping polygon
    las_data = call_lastools('lasclip', input=las_data, output='-stdout',
                             args=['-poly', crop_heatmap_poly], verbose=False)

    # Create clipping polygon for heatmap raster
    shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
    call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)

    # Make a raster from point cloud
    tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
    call_lastools('blast2dem', input=las_data, output=tif_name,
                  args=['-step', 0.2], verbose=False)

    # Extract elevations along profiles from triangulated surface
    df = extract_pts(
        las_data,
        cp_csv,
        survey_date,
        beach,
        args=['-parse', 'sxyz', '-keep_class', '2'],
        verbose=False)

    # Update survey profiles
    update_survey_output(df, csv_output_dir)

    # Get landward limit of surveys
    ch_limits = pd.read_excel(profile_limit_file, index_col='Profile')

    # Plot profiles, and save sand volumes for current beach
    profile_names = df['Profile'].unique()
    for profile_name in profile_names:
        plot_profiles(profile_name, survey_date, csv_output_dir, graph_loc, ch_limits)
        calculate_volumes(profile_name, survey_date, csv_output_dir, ch_limits, volume_output_dir)

    # Remove temporary files
    remove_temp_files(tmp_dir)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'input_file',
        metavar='PARAMS_FILE',
        help='name of parameter file',
        default=None)

    # Print usage if no arguments are provided
    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    args = parser.parse_args()

    # read the parameters file and scroll through it
    input_file = args.input_file
    params_file=pd.read_excel(input_file, sheet_name="PARAMS")

    for i, row in params_file.iterrows():
        print("Starting to process %s" % row['Beach'])
        beach=row['Beach']
        survey_date = row['SURVEY DATE']
        original_las = row['INPUT LAS']
        classified_las_dir = row['LAS CLASSIFIED FOLDER']
        shp_swash_dir = row['SHP SWASH FOLDER']
        crop_heatmap_poly = row['HEATMAP CROP POLY']
        output_las_dir = row['LAS OUTPUT FOLDER']
        zone_MGA = row['ZONE MGA']
        output_poly_dir = row['SHP RASTER FOLDER']
        output_tif_dir = row['TIF OUTPUT FOLDER']
        cp_csv = row['INPUT CSV']
        profile_limit_file = row['PROFILE LIMIT FILE']
        csv_output_dir = row['CSV OUTPUT FOLDER']
        graph_loc = row['PNG OUTPUT FOLDER']
        volume_output_dir = row['CSV VOLUMES FOLDER']
        tmp_dir = row['TMP FOLDER']

        # Get base name of input las
        las_basename = os.path.splitext(os.path.basename(original_las))[0]

        # Get name of input point cloud
        input_las = os.path.join(classified_las_dir, las_basename + '.las')

        # Get name of swash cropping polygon
        crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')

        # Crop point cloud to swash boundary
        print('Cropping swash...')
        las_data = call_lastools('lasclip', input=input_las, output='-stdout',
                                 args=['-poly', crop_swash_poly], verbose=False)

        # Apply sea-side clipping polygon
        print('Cropping back of beach...')
        las_data = call_lastools('lasclip', input=las_data, output='-stdout',
                                 args=['-poly', crop_heatmap_poly], verbose=False)

        # Create clipping polygon for heatmap raster
        print('Creating heat map cropping polygon...')
        shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
        call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)

        # Make a raster from point cloud
        print('Creating heat map raster...')
        tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
        call_lastools('blast2dem', input=las_data, output=tif_name,
                      args=['-step', 0.2], verbose=False)

        # Extract elevations along profiles from triangulated surface
        print('Extracting profile elevations...')
        df = extract_pts(
            las_data,
            cp_csv,
            survey_date,
            beach,
            args=['-parse', 'sxyz', '-keep_class', '2'],
            verbose=False)

        # Update survey profiles
        update_survey_output(df, csv_output_dir)

        # Get landward limit of surveys
        ch_limits = pd.read_excel(profile_limit_file, index_col='Profile')

        # Plot profiles, and save sand volumes for current beach
        print('Updating figures...')
        profile_names = df['Profile'].unique()
        for profile_name in profile_names:
            plot_profiles(profile_name, survey_date, csv_output_dir, graph_loc, ch_limits)
            calculate_volumes(profile_name, survey_date, csv_output_dir, ch_limits, volume_output_dir)

        # Remove temporary files
        remove_temp_files(tmp_dir)


if __name__ == '__main__':
    main()
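Note: with main() now taking the parameter spreadsheet as a command-line argument (metavar PARAMS_FILE) rather than the hard-coded path, the refactored script would presumably be invoked along these lines; the script filename below is assumed for illustration, and calling it with no arguments prints the usage message and exits:

    python las_outputs.py "Parameter Files/las-manipulation-survey-2.xlsx"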