# python 3.5
# Requires LAStools to be installed (with the appropriate license). Note that LAStools requires no spaces in file names.
# 2017088_las_manipulation should have been run previously, so that a las with the buildings and vegetation removed is available.
# Note that the neilson_volumes script must be in the same folder.
# This script will:
#     - crop to a given polygon (crop away the swash zone)
#     - extract values along a predefined profile
#     - do the volume analysis
#     - export pngs of the surveys
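#
# The PARAMS sheet of the parameter spreadsheet is read row by row; each row
# describes one survey and must provide the columns used at the top of the
# processing loop below (e.g. 'Beach', 'SURVEY DATE', 'INPUT LAS', the various
# input/output folder columns, and 'PROFILE LIMIT FILE').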
########################### IMPORTS ###########################################
import os
import io
import re
import math
import datetime
import subprocess
import numpy as np
import pandas as pd
from cycler import cycler
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator
import neilson_volumes
from survey_tools import call_lastools, extract_pts, update_survey_output


def remove_temp_files(directory):
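    """Remove all files in the given temporary directory."""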
    for f in os.listdir(directory):
        os.unlink(os.path.join(directory, f))
    return None


def plot_profiles(profile_name, survey_date, csv_output_dir, graph_loc, ch_limits):
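    """Plot every survey column in <profile_name>.csv (from csv_output_dir),
    landward of the profile's 'Landward Limit' chainage, and save the figure
    to graph_loc as <profile_name>.png.
    """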
    csv_name = profile_name + '.csv'
    profiles = pd.read_csv(os.path.join(csv_output_dir, csv_name))

    # Remove metadata, and extract profile coordinates
    profiles = profiles.loc[:, 'Chainage':].set_index('Chainage')

    # Find landward limit of profile (behind beach)
    ch_min = ch_limits.loc[profile_name, 'Landward Limit']

    ax = plt.axes()
    for col in profiles.columns:
        profile = profiles.loc[ch_min:, col]
        date_str = str(survey_date)
        date = '{}-{}-{}'.format(date_str[:4], date_str[4:6], date_str[6:])
        ax.plot(profile.index, profile, label=date)

    ax.set_xlabel('Chainage (m)', labelpad=10)
    ax.set_ylabel('Elevation (m AHD)', labelpad=10)
    ax.legend(frameon=False, fontsize=9)

    ax.xaxis.set_minor_locator(MultipleLocator(5))
    ax.yaxis.set_minor_locator(MultipleLocator(0.5))
    ax.xaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)
    ax.yaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)

    png_name = os.path.join(graph_loc, profile_name + '.png')
    plt.savefig(png_name, bbox_inches='tight', dpi=300)
    plt.close()


def calculate_volumes(profile_name, survey_date, csv_output_dir, ch_limits, volume_output_dir):
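    """Calculate the Nielsen volume available above the profile's landward
    limit and record it against the survey date in volumes.csv (located in
    volume_output_dir), creating the spreadsheet if it does not exist yet.
    """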
    csv_prof = profile_name + '.csv'
    beach = re.search(r'.*(?=_\d)', profile_name).group()
    profiles = pd.read_csv(os.path.join(csv_output_dir, csv_prof))

    # Remove metadata, and extract profile coordinates
    profiles = profiles.loc[:, 'Chainage':].set_index('Chainage')

    # Find landward limit of profile (behind beach)
    ch_min = ch_limits.loc[profile_name, 'Landward Limit']

    # Open volume spreadsheet
    csv_vol = os.path.join(volume_output_dir, 'volumes.csv')
    try:
        volumes = pd.read_csv(csv_vol, index_col=0)
    except FileNotFoundError:
        volumes = pd.DataFrame()

    # Format dates
    date_str = str(survey_date)
    date = '{}-{}-{}'.format(date_str[:4], date_str[4:6], date_str[6:])

    for current_date in profiles.columns:
        # Get Nielsen erosion volumes
        chainage = profiles.loc[:, current_date].dropna().index
        elevation = profiles.loc[:, current_date].dropna().values
        volume = neilson_volumes.volume_available(chainage, elevation, ch_min)

        # Update spreadsheet
        volumes.loc[profile_name, date] = volume

    # Save updated volumes spreadsheet
    volumes = volumes.sort_index()
    volumes.to_csv(csv_vol)


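########################### PROCESS SURVEYS ###################################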
input_file = 'Parameter Files/las-manipulation-survey-2.xlsx'
params_file = pd.read_excel(input_file, sheet_name="PARAMS")
for i, row in params_file.iterrows():
print("Starting to process %s" % row['Beach'])
beach=row['Beach']
survey_date = row['SURVEY DATE']
original_las = row['INPUT LAS']
classified_las_dir = row['LAS CLASSIFIED FOLDER']
shp_swash_dir = row['SHP SWASH FOLDER']
crop_heatmap_poly = row['HEATMAP CROP POLY']
output_las_dir = row['LAS OUTPUT FOLDER']
zone_MGA = row['ZONE MGA']
output_poly_dir = row['SHP RASTER FOLDER']
output_tif_dir = row['TIF OUTPUT FOLDER']
cp_csv = row['INPUT CSV']
profile_limit_file = row['PROFILE LIMIT FILE']
csv_output_dir = row['CSV OUTPUT FOLDER']
graph_loc = row['PNG OUTPUT FOLDER']
volume_output_dir = row['CSV VOLUMES FOLDER']
tmp_dir = row['TMP FOLDER']
# Get base name of input las
las_basename = os.path.splitext(os.path.basename(original_las))[0]
# Get name of input point cloud
input_las = os.path.join(classified_las_dir, las_basename + '.las')
# Get name of swash cropping polygon
crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')
# Crop point cloud to swash boundary
las_data = call_lastools('lasclip', input=input_las, output='-stdout',
args=['-poly', crop_swash_poly], verbose=False)
# Apply sea-side clipping polygon
las_data = call_lastools('lasclip', input=las_data, output='-stdout',
args=['-poly', crop_heatmap_poly], verbose=False)
# Create clipping polygon for heatmap raster
shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)
# Make a raster from point cloud
tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
call_lastools('blast2dem', input=las_data, output=tif_name,
args=['-step', 0.2], verbose=False)
# Extract elevations along profiles from triangulated surface
df = extract_pts(
las_data,
cp_csv,
survey_date,
beach,
args=['-parse', 'sxyz', '-keep_class', '2'],
verbose=False)
# Update survey profiles
update_survey_output(df, csv_output_dir)
# Get landward limit of surveys
ch_limits = pd.read_excel(profile_limit_file, index_col='Profile')
# Plot profiles, and save sand volumes for current beach
profile_names = df['Profile'].unique()
for profile_name in profile_names:
plot_profiles(profile_name, survey_date, csv_output_dir, graph_loc, ch_limits)
calculate_volumes(profile_name, survey_date, csv_output_dir, ch_limits, volume_output_dir)
    # Remove temporary files
    remove_temp_files(tmp_dir)