# python 3.5
# Requires LAStools to be installed (with the appropriate license). Note that
# LAStools requires no spaces in file names.
# You should previously have run 2017088_las_manipulation to produce a las
# with the buildings and vegetation removed.
# Note that the neilson_volumes script must be in the same folder.
#
# This script will:
#     - crop to a given polygon (crop away the swash zone)
#     - extract values along a predefined profile
#     - do the volume analysis
#     - export pngs of the surveys

########################### IMPORTS ###########################################
import os
import io
import subprocess
import datetime
import math

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator
import xlsxwriter
from cycler import cycler

import neilson_volumes
from survey_tools import call_lastools, extract_pts, update_survey_output
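
# -----------------------------------------------------------------------------
# CC_split_profile is called by profile_plots_volume below, but is not defined
# in this script or imported above. The sketch that follows is an assumed
# implementation, reconstructed from how its return value is used: a dict with
# an 'info' entry holding the profile name, plus one entry per survey date
# mapping to that survey's 'Chainage' and 'Elevation' lists. It assumes the
# per-profile csv layout written by update_survey_output (a 'Chainage' column
# followed by one elevation column per survey date).
def CC_split_profile(file_path):
    """Split a per-profile csv into a dict keyed by survey date (assumed helper)."""
    data = pd.read_csv(file_path)
    data = data.loc[:, 'Chainage':]  # drop any metadata columns before 'Chainage'
    profile_name = os.path.splitext(os.path.basename(file_path))[0]
    profile_data = {'info': {'Profile': profile_name}}
    for col in data.columns:
        if col == 'Chainage':
            continue
        profile_data[col] = {
            'Chainage': data['Chainage'].tolist(),
            'Elevation': data[col].tolist(),
        }
    return profile_data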
def profile_plots_volume(csv_loc, LL_xlsx, output_xlsx, graph_location):
    # get a list of all csvs, each of which will be analysed
    file_list = []
    for file in os.listdir(csv_loc):
        if file.endswith(".csv"):
            file_list.append(os.path.join(csv_loc, file))

    # now read the landward limit (LL) file
    LL_limit_file = pd.read_excel(LL_xlsx, 'profile_locations')
    LL_info = {}
    for i in range(0, len(LL_limit_file)):
        # make a dictionary that allows you to look up the LL by profile name
        prof = "%s_%s" % (LL_limit_file['Profile'][i].split(" ")[0],
                          LL_limit_file['Profile'][i].split(" ")[-1])
        LL_info[prof] = LL_limit_file['Landward Limit'][i]

    all_dates = []
    results_volume = {}
    for file in file_list:
        # read the profile data - this should have all dates
        profile_data = CC_split_profile(file)
        profile = profile_data['info']['Profile']

        # plot all of the survey dates for this profile
        print(profile)
        plot_profiles(profile, csv_loc, graph_location, LL_info[profile])

        results_volume[profile] = {}

        # now go through each date and do a Neilson volume calculation
        for date in profile_data.keys():
            if date != 'info':
                if date not in all_dates:
                    all_dates.append(date)

                chainage = profile_data[date]['Chainage']
                elevation = [0 if pd.isnull(z) else z
                             for z in profile_data[date]['Elevation']]
                LL_limit = LL_info[profile]

                # do a Neilson calculation to get the ZSA volume
                if len(elevation) > 2:
                    # if there aren't enough available points don't do it
                    volume = neilson_volumes.volume_available(chainage, elevation, LL_limit)
                    if volume < 0:
                        volume = 0
                        print('%s %s has a negative volume available' % (profile, date))
                else:
                    volume = 0

                results_volume[profile][date] = volume

    # write an excel sheet which summarises the data
    workbook = xlsxwriter.Workbook(output_xlsx)
    worksheet = workbook.add_worksheet('Volumes')

    row = 0
    col = 0
    worksheet.write(row, col, 'Profile')
    for date in all_dates:
        col = col + 1
        worksheet.write(row, col, date)

    col = 0
    row = 1
    for prof in results_volume.keys():
        worksheet.write(row, col, prof)
        for date in all_dates:
            col = col + 1
            try:
                vol = results_volume[prof][date]
            except KeyError:
                print("error with profile %s on %s" % (prof, date))
                vol = None
            worksheet.write(row, col, vol)
        col = 0
        row = row + 1
    # xlsxwriter only writes the file on close
    workbook.close()

    return results_volume


def remove_temp_files(directory):
    for f in os.listdir(directory):
        os.unlink(os.path.join(directory, f))
    return None


def plot_profiles(profile_name, csv_output_dir, graph_loc, ch_min):
    """Plot all surveys for one profile, landward of chainage ch_min."""
    csv_name = profile_name + '.csv'
    profiles = pd.read_csv(os.path.join(csv_output_dir, csv_name))

    # Remove metadata, and extract profile coordinates
    profiles = profiles.loc[:, 'Chainage':].set_index('Chainage')

    ax = plt.axes()
    for col in profiles.columns:
        profile = profiles.loc[ch_min:, col]
        # Label each line by its survey date (assumes the csv columns are
        # named by date in YYYYMMDD format)
        date_str = str(col)
        date = '{}-{}-{}'.format(date_str[:4], date_str[4:6], date_str[6:])
        ax.plot(profile.index, profile, label=date)

    ax.set_xlabel('Chainage (m)', labelpad=10)
    ax.set_ylabel('Elevation (m AHD)', labelpad=10)
    ax.legend(frameon=False, fontsize=9)

    ax.xaxis.set_minor_locator(MultipleLocator(5))
    ax.yaxis.set_minor_locator(MultipleLocator(0.5))
    ax.xaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)
    ax.yaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)

    png_name = os.path.join(graph_loc, profile_name + '.png')
    plt.savefig(png_name, bbox_inches='tight', dpi=300)
    plt.close()


input_file = 'Parameter Files/las-manipulation-survey-2.xlsx'
params_file = pd.read_excel(input_file, sheet_name="PARAMS")

for i, row in params_file.iterrows():
    print("Starting to process %s" % row['Beach'])
    beach = row['Beach']
    survey_date = row['SURVEY DATE']
    original_las = row['INPUT LAS']
    classified_las_dir = row['LAS CLASSIFIED FOLDER']
    shp_swash_dir = row['SHP SWASH FOLDER']
    crop_heatmap_poly = row['HEATMAP CROP POLY']
    output_las_dir = row['LAS OUTPUT FOLDER']
    zone_MGA = row['ZONE MGA']
    output_poly_dir = row['SHP RASTER FOLDER']
    output_tif_dir = row['TIF OUTPUT FOLDER']
    cp_csv = row['INPUT CSV']
    profile_limit_file = row['PROFILE LIMIT FILE']
    csv_output_dir = row['CSV OUTPUT FOLDER']
    graph_loc = row['PNG OUTPUT FOLDER']
    volume_output = row['CSV VOLUMES FOLDER']
    tmp_dir = row['TMP FOLDER']

    # Get base name of input las
    las_basename = os.path.splitext(os.path.basename(original_las))[0]

    # Get name of input point cloud
    input_las = os.path.join(classified_las_dir, las_basename + '.las')

    # Get name of swash cropping polygon
    crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')

    # Crop point cloud to swash boundary
    las_data = call_lastools('lasclip', input=input_las, output='-stdout',
                             args=['-poly', crop_swash_poly], verbose=False)

    # Apply sea-side clipping polygon
    las_data = call_lastools('lasclip', input=las_data, output='-stdout',
                             args=['-poly', crop_heatmap_poly], verbose=False)

    # Create clipping polygon for heatmap raster
    shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
    call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)

    # Make a raster from point cloud
    tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
    call_lastools('blast2dem', input=las_data, output=tif_name,
                  args=['-step', 0.2], verbose=False)

    # Extract elevations along profiles from triangulated surface
    df = extract_pts(
        las_data,
        cp_csv,
        survey_date,
        beach,
        args=['-parse', 'sxyz', '-keep_class', '2'],
        verbose=False)

    update_survey_output(df, csv_output_dir)

    # Get landward limit of surveys
    ch_limits = pd.read_excel(profile_limit_file, index_col='Profile')

    # Plot profiles of current beach
    profile_names = df['Profile'].unique()
    for profile_name in profile_names:
        # Find landward limit of profile (behind beach)
        ch_min = ch_limits.loc[profile_name, 'Landward Limit']
        plot_profiles(profile_name, csv_output_dir, graph_loc, ch_min)

    # Remove temporary files
    remove_temp_files(tmp_dir)

print("doing the volume analysis")
# test = profile_plots_volume(csv_output_dir, profile_limit_file, volume_output, graph_loc)
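
# -----------------------------------------------------------------------------
# To run the volume analysis, uncomment the profile_plots_volume call above.
# A minimal smoke test of the Neilson routine (example values are assumed;
# the signature matches its use in profile_plots_volume: chainage and
# elevation series plus a landward-limit chainage) would be:
#
#   chainage = [0, 5, 10, 15, 20]
#   elevation = [4.0, 3.2, 2.1, 1.0, 0.0]
#   vol = neilson_volumes.volume_available(chainage, elevation, 0)
#   print('volume available:', vol)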