# python 3.5
# requires LAStools to be installed (with the appropriate license). Note that LAStools requires no spaces in file names.
# should have previously run 2017088_las_manipulation to have a las that has the buildings and veg removed.
# note that the neilson volumes script must be in the same folder.
# this script will:
#   - crop to a given polygon (crop away the swash zone)
#   - extract values along a predefined profile
#   - do the volume analysis
#   - export pngs of the surveys

########################### IMPORTS ###########################################
import os
import io
import subprocess
import pandas as pd
import numpy as np
import neilson_volumes
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator
import datetime
import xlsxwriter
import math
from cycler import cycler
from survey_tools import call_lastools, extract_pts, update_survey_output


def plot_profiles(profile_info, profile, output_loc, LL_limit):
    # plot the profile. expects output from CC_split_profile
    YminorLocator = MultipleLocator(0.5)
    XminorLocator = MultipleLocator(5)

    fig, ax = plt.subplots(figsize=(8, 3))
    num_plots = len(profile_info.keys()) - 1  # don't count the 'info' key
    colormap = plt.cm.jet
    ax.set_prop_cycle(cycler('color', [colormap(i) for i in np.linspace(0, 0.9, num_plots)]))

    max_y = 0
    for date in profile_info.keys():
        if date != 'info':
            plt.plot(profile_info[date]['Chainage'], profile_info[date]['Elevation'], label=date)
            try:
                date_max = max([i for i in profile_info[date]['Elevation'] if pd.isnull(i) == False])
                if date_max > max_y:
                    max_y = date_max
            except ValueError:
                print("empty elevation section for %s" % date)

    plt.plot([LL_limit, LL_limit], [-1, max_y], 'r--', alpha=0.5, label="Landward Limit")
    plt.xlabel('Chainage (m)', weight='bold')
    plt.ylabel('Elevation (m AHD)', weight='bold')
    plt.legend(loc='upper right', bbox_to_anchor=(1.3, 1))
    plt.title(profile)
    plt.rcParams['font.size'] = 8
    ax.set_ylim([-1, math.ceil(max_y)])
    ax.xaxis.set_minor_locator(XminorLocator)
    ax.yaxis.set_minor_locator(YminorLocator)
    ax.xaxis.grid(True, which='minor', color='k', linestyle='-', alpha=0.3)
    ax.yaxis.grid(True, which='minor', color='k', linestyle='-', alpha=0.3)
    plt.grid(which='major', color='k', linestyle='-')

    today = datetime.datetime.now().date().strftime('%Y%m%d')
    plt.savefig(os.path.join(output_loc, '%s_%s.png' % (today, profile)), bbox_inches='tight', dpi=900)
    plt.clf()

    return None


def CC_split_profile(file2read):
    # this reads a profile csv and splits it into a dictionary keyed by survey date
    file_master = pd.read_csv(file2read)
    beach_original = file_master['Beach'].tolist()
    profile_original = file_master['Profile'].tolist()
    date_original = file_master['Date'].tolist()
    chainage_original = file_master['Chainage'].tolist()
    elevation_original = file_master['Elevation'].tolist()
    easting_original = file_master['Easting'].tolist()
    northing_original = file_master['Northing'].tolist()

    data = {}
    i = 0
    # add info on the beach and profile number
    data['info'] = {'Profile': profile_original[0], 'Beach': beach_original[0]}

    date_now = date_original[0]
    # (assumed reconstruction) group consecutive rows that share the same Date into one survey
    while i < len(date_original):
        chainage = []
        elevation = []
        easting = []
        northing = []
        while i < len(date_original) and date_original[i] == date_now:
            chainage.append(chainage_original[i])
            elevation.append(elevation_original[i])
            easting.append(easting_original[i])
            northing.append(northing_original[i])
            i = i + 1
        data[date_now] = {'Chainage': chainage, 'Elevation': elevation,
                          'Easting': easting, 'Northing': northing}
        if i < len(date_original):
            date_now = date_original[i]

    return data


def profile_plots_volume(csv_loc, profile_limit_file, volume_output, graph_location):
    # plot each profile and run the neilson volume analysis against its landward limit,
    # then summarise the volumes in an excel sheet.
    # (assumed reconstruction of the first half of this function: the profile limit file
    # is assumed to hold 'Profile' and 'Landward Limit' columns, and the name of the
    # summary workbook inside the volumes folder is assumed.)
    LL_info = pd.read_excel(profile_limit_file)
    LL_dict = dict(zip(LL_info['Profile'], LL_info['Landward Limit']))

    output_xlsx = os.path.join(volume_output, 'volume_analysis.xlsx')

    results_volume = {}
    all_dates = []
    for file in [f for f in os.listdir(csv_loc) if f.lower().endswith('.csv')]:
        profile_info = CC_split_profile(os.path.join(csv_loc, file))
        profile = profile_info['info']['Profile']
        LL_limit = LL_dict[profile]

        # plot the profile
        plot_profiles(profile_info, profile, graph_location, LL_limit)

        # do the volume analysis for each survey date
        results_volume[profile] = {}
        for date in profile_info.keys():
            if date == 'info':
                continue
            if date not in all_dates:
                all_dates.append(date)
            chainage = profile_info[date]['Chainage']
            elevation = profile_info[date]['Elevation']
            if len([i for i in elevation if pd.isnull(i) == False]) > 2:  # if there aren't enough available points don't do it
                volume = neilson_volumes.volume_available(chainage, elevation, LL_limit)
                if volume < 0:
                    volume = 0
                    print('%s %s has a negative volume available' % (profile, date))
            else:
                volume = 0
            results_volume[profile][date] = volume

    # write an excel sheet which summarises the data
    workbook = xlsxwriter.Workbook(output_xlsx)
    worksheet = workbook.add_worksheet('Volumes')

    row = 0
    col = 0
    worksheet.write(row, col, 'Profile')
    for date in all_dates:
        col = col + 1
        worksheet.write(row, col, date)

    col = 0
    row = 1
    for prof in results_volume.keys():
        worksheet.write(row, col, prof)
        for date in all_dates:
            col = col + 1
            try:
                vol = results_volume[prof][date]
            except KeyError:
                print("error with profile %s on %s" % (prof, date))
                vol = None
            worksheet.write(row, col, vol)
        col = 0
        row = row + 1

    workbook.close()  # the workbook is only written to disk on close

    return results_volume

def remove_temp_files(directory):
    for f in os.listdir(directory):
        os.unlink(os.path.join(directory, f))

    return None


input_file = 'Parameter Files/las-manipulation-survey-2.xlsx'
params_file = pd.read_excel(input_file, sheet_name="PARAMS")

for i in range(0, len(params_file)):  # 0, len(params_file)
    print("Starting to process %s" % params_file['Beach'][i])
    beach = params_file['Beach'][i]
    survey_date = params_file['SURVEY DATE'][i]
    original_las = params_file['INPUT LAS'][i]
    classified_las_dir = params_file['LAS CLASSIFIED FOLDER'][i]
    shp_swash_dir = params_file['SHP SWASH FOLDER'][i]
    crop_heatmap_poly = params_file['HEATMAP CROP POLY'][i]
    output_las_dir = params_file['LAS OUTPUT FOLDER'][i]
    zone_MGA = params_file['ZONE MGA'][i]
    output_poly_dir = params_file['SHP RASTER FOLDER'][i]
    output_tif_dir = params_file['TIF OUTPUT FOLDER'][i]
    cp_csv = params_file['INPUT CSV'][i]
    profile_limit_file = params_file['PROFILE LIMIT FILE'][i]
    csv_output_dir = params_file['CSV OUTPUT FOLDER'][i]
    graph_loc = params_file['PNG OUTPUT FOLDER'][i]
    volume_output = params_file['CSV VOLUMES FOLDER'][i]
    tmp_dir = params_file['TMP FOLDER'][i]

    # Get base name of input las
    las_basename = os.path.splitext(os.path.basename(original_las))[0]

    # Get name of input point cloud
    input_las = os.path.join(classified_las_dir, las_basename + '.las')

    # Get name of swash cropping polygon
    crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')

    # Crop point cloud to swash boundary
    las_data = call_lastools('lasclip', input=input_las, output='-stdout',
                             args=['-poly', crop_swash_poly], verbose=False)
    # crop_las(input_las5, crop_swash_poly, final_las, path_2_lastools)

    # Apply sea-side clipping polygon
    las_data = call_lastools('lasclip', input=las_data, output='-stdout',
                             args=['-poly', crop_heatmap_poly], verbose=False)
    # crop_las(final_las, heatmap_crop_poly, heatmap_las, path_2_lastools)

    # Create clipping polygon for heatmap raster
    shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
    call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)
    # las_boundary(heatmap_las, output_poly_name, output_poly_dir, path_2_lastools, zone_MGA)

    # make a raster
    # make_raster(heatmap_las, output_raster, path_2_lastools, keep_only_ground=True)
    tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
    call_lastools('blast2dem', input=las_data, output=tif_name,
                  args=['-step', 0.2], verbose=False)

    # extract the points and get volumes
    df = extract_pts(
        las_data,
        cp_csv,
        survey_date,
        beach,
        args=['-parse', 'sxyz', '-keep_class', '2'],
        verbose=False)
    update_survey_output(df, csv_output_dir)

    # colourise the point cloud

    # delete the temp files from the tmp_dir and the interim_dir
    remove_temp_files(tmp_dir)
    # remove_temp_files(int_dir)

    print("doing the volume analysis")
    test = profile_plots_volume(csv_output_dir, profile_limit_file, volume_output, graph_loc)