# central-coast-aerial-survey/outputs_2017088_Survey2.py
# python 3.5
# requires LAStools to be installed (with the appropriate license). Note that LAStools requires no spaces in file names
# should have previously run 2017088_las_manipulation to produce a las with the buildings and vegetation removed
# note that the neilson_volumes script must be in the same folder
# this script will:
#   - crop to a given polygon (crop away the swash zone)
#   - extract values along a predefined profile
#   - do the volume analysis
#   - export pngs of the surveys
########################### IMPORTS ###########################################
import os
import io
import subprocess
import pandas as pd
import numpy as np
import neilson_volumes
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator
import datetime
import xlsxwriter
import math
from cycler import cycler
from survey_tools import call_lastools, extract_pts, update_survey_output
###############################################################################
########################## FIXED INPUTS #######################################
######### UNCOMMENT THIS SECTION IF YOU WANT TO DEFINE EACH INPUT INDIVIDUALLY ######################
## path to LAStools NOTE THERE CAN BE NO SPACES
path_2_lastools='C:/ProgramData/chocolatey'
# input_file=r"J:\Project\wrl2017088 Central Coast Council Aerial Survey and Coastal Analysis\04_Working\Python\Survey 2\Parameter Files\las outputs survey2.xlsx"
input_file='Parameter Files/las outputs survey2.xlsx'
#input_file=r"J:\Project\wrl2017088 Central Coast Council Aerial Survey and Coastal Analysis\04_Working\Python\Survey 2\Parameter Files\las outputs survey2_V2.xlsx"
##############################
############################### SUB ROUTINES ##################################
def check_output(command, console):
    if console:
        process = subprocess.Popen(command)
    else:
        process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT, universal_newlines=True)
    output, error = process.communicate()
    returncode = process.poll()
    return returncode, output
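# A minimal usage sketch of check_output (illustrative only; lasinfo is any LAStools
# executable and 'example.las' is a placeholder file name, not project data):
#   rc, out = check_output(path_2_lastools + "\\bin\\lasinfo -i example.las", False)
#   if rc != 0:
#       print("lasinfo reported an error")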
def crop_las(las, shp, output, lastools_loc):
    # output is the full path and filename (inc extension) to write to
    path_2_lasclip = lastools_loc + "\\bin\\lasclip"
    command = "%s -i %s -poly %s -o %s" % (path_2_lasclip, las, shp, output)
    returncode, output = check_output(command, False)
    if returncode != 0:
        print("Error. lasclip failed on %s" % os.path.splitext(os.path.basename(shp))[0])
    else:
        return None
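# Illustrative call of crop_las (file names are placeholders, not project data):
#   crop_las('survey.las', 'swash_boundary.shp', 'survey_cropped.las', path_2_lastools)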
def las_boundary(las, crop_poly_name, path_2_crop_polygon, lastools_loc, zone):
    path_2_lasboundary = lastools_loc + "\\bin\\lasboundary"
    fname = crop_poly_name
    prjfname = "%s%s.prj" % (path_2_crop_polygon, fname)
    path_2_crop_poly = '%s%s.shp' % (path_2_crop_polygon, fname)
    command = "%s -i %s -o %s" % (path_2_lasboundary, las, path_2_crop_poly)
    prj = open(prjfname, 'w')
    if zone == 56:
        prj.write('PROJCS["GDA_1994_MGA_Zone_56",GEOGCS["GCS_GDA_1994",DATUM["D_GDA_1994",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",10000000.0],PARAMETER["Central_Meridian",153.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]]')
    elif zone == 55:
        prj.write('PROJCS["GDA_1994_MGA_Zone_55",GEOGCS["GCS_GDA_1994",DATUM["D_GDA_1994",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",10000000.0],PARAMETER["Central_Meridian",147.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]]')
    elif zone == 57:
        prj.write('PROJCS["GDA_1994_MGA_Zone_57",GEOGCS["GCS_GDA_1994",DATUM["D_GDA_1994",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",10000000.0],PARAMETER["Central_Meridian",159.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]]')
    prj.close()
    returncode, output = check_output(command, False)
    return None
def make_raster(las, output, lastools_loc, keep_only_ground=False, step=0.2):
    # note that the keep_only_ground option only rasters points classified as "2" in the lidar (ie ground)
    # this effectively creates a "bare earth dem"
    # note that this should only be used after remove_veg and/or remove_buildings has been run
    path_2_blastdem = lastools_loc + "\\bin\\blast2dem"
    if not keep_only_ground:
        command = "%s -i %s -o %s -step %s" % (path_2_blastdem, las, output, step)
    else:
        command = "%s -i %s -o %s -step %s -keep_class 2" % (path_2_blastdem, las, output, step)
    returncode, output2 = check_output(command, False)
    if returncode != 0:
        print("Error. blast2dem failed on %s" % las.split('\\')[-1].split('.')[0])
    else:
        return None
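# Illustrative call of make_raster producing a 0.2 m bare-earth DEM (file names are placeholders):
#   make_raster('survey_cropped.las', 'survey_dem.tif', path_2_lastools, keep_only_ground=True)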
def plot_profiles(profile_info, profile, output_loc, LL_limit):
    # plot the profile. expects output from CC_split_profile
    YminorLocator = MultipleLocator(0.5)
    XminorLocator = MultipleLocator(5)
    fig, ax = plt.subplots(figsize=(8, 3))
    num_plots = len(profile_info.keys()) - 1
    colormap = plt.cm.jet
    ax.set_prop_cycle(cycler('color', [colormap(i) for i in np.linspace(0, 0.9, num_plots)]))
    max_y = 0
    for date in profile_info.keys():
        if date != 'info':
            plt.plot(profile_info[date]['Chainage'], profile_info[date]['Elevation'], label=date)
            try:
                if max([i for i in profile_info[date]['Elevation'] if not pd.isnull(i)]) > max_y:
                    max_y = max([i for i in profile_info[date]['Elevation'] if not pd.isnull(i)])
            except ValueError:
                # max() of an empty list means every elevation for this date was null
                print("empty elevation section for %s" % date)
    plt.plot([LL_limit, LL_limit], [-1, max_y], 'r--', alpha=0.5, label="Landward Limit")
    plt.xlabel('Chainage (m)', weight='bold')
    plt.ylabel('Elevation (m AHD)', weight='bold')
    plt.legend(loc='upper right', bbox_to_anchor=(1.3, 1))
    plt.title(profile)
    plt.rcParams['font.size'] = 8
    ax.set_ylim([-1, math.ceil(max_y)])
    ax.xaxis.set_minor_locator(XminorLocator)
    ax.yaxis.set_minor_locator(YminorLocator)
    ax.xaxis.grid(True, which='minor', color='k', linestyle='-', alpha=0.3)
    ax.yaxis.grid(True, which='minor', color='k', linestyle='-', alpha=0.3)
    plt.grid(which='major', color='k', linestyle='-')
    today = datetime.datetime.now().date().strftime('%Y%m%d')
    plt.savefig(os.path.join(output_loc, '%s_%s.png' % (today, profile)), bbox_inches='tight', dpi=900)
    plt.clf()
    return None
def CC_split_profile(file2read):
    # this reads the profile file and splits it into dates
    file_master = pd.read_csv(file2read)
    beach_original = file_master['Beach'].tolist()
    profile_original = file_master['Profile'].tolist()
    date_original = file_master['Date'].tolist()
    chainage_original = file_master['Chainage'].tolist()
    elevation_original = file_master['Elevation'].tolist()
    easting_original = file_master['Easting'].tolist()
    northing_original = file_master['Northing'].tolist()
    data = {}
    i = 0
    # add info on the beach and profile number
    data['info'] = {'Profile': profile_original[0], 'Beach': beach_original[0]}
    date_now = date_original[0]
    while i < len(file_master):
        chainage_tmp = []
        elevation_tmp = []
        easting_tmp = []
        northing_tmp = []
        while i < len(file_master) and date_now == date_original[i]:
            chainage_tmp.append(chainage_original[i])
            elevation_tmp.append(elevation_original[i])
            easting_tmp.append(easting_original[i])
            northing_tmp.append(northing_original[i])
            i = i + 1
        data[date_now] = {'Beach': beach_original[i-1], 'Profile': profile_original[i-1], 'Easting': easting_tmp, 'Northing': northing_tmp, 'Elevation': elevation_tmp, 'Chainage': chainage_tmp}
        if i < len(file_master):
            date_now = date_original[i]
    return data
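# The returned dictionary is keyed by survey date plus an 'info' entry, e.g.
# (keys and values below are illustrative, not real survey data):
#   {'info': {'Profile': 'AVOCA_1', 'Beach': 'AVOCA'},
#    '20170528': {'Beach': 'AVOCA', 'Profile': 'AVOCA_1',
#                 'Easting': [...], 'Northing': [...],
#                 'Elevation': [...], 'Chainage': [...]}}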
def profile_plots_volume(csv_loc, LL_xlsx, output_xlsx, graph_location):
    # get a list of all csvs which will each be analysed
    file_list = []
    for file in os.listdir(csv_loc):
        if file.endswith(".csv"):
            file_list.append(os.path.join(csv_loc, file))
    # now read the LL file
    LL_limit_file = pd.read_excel(LL_xlsx, 'profile_locations')
    LL_info = {}
    for i in range(0, len(LL_limit_file)):
        # make a dictionary that allows you to search the LL
        prof = "%s_%s" % (LL_limit_file['Profile'][i].split(" ")[0], LL_limit_file['Profile'][i].split(" ")[-1])
        LL_info[prof] = LL_limit_file['Landward Limit'][i]
    all_dates = []
    results_volume = {}
    for file in file_list:
        # read the profile data - this should have all dates
        profile_data = CC_split_profile(file)
        profile = profile_data['info']['Profile']
        # plot all of the profiles
        print(profile)
        plot_profiles(profile_data, profile, graph_location, LL_info[profile])
        results_volume[profile] = {}
        # now go through each date and do a Neilson volume calculation
        for date in profile_data.keys():
            if date != 'info':
                if date not in all_dates:
                    all_dates.append(date)
                chainage = profile_data[date]['Chainage']
                elevation = [0 if pd.isnull(profile_data[date]['Elevation'][i]) else profile_data[date]['Elevation'][i] for i in range(0, len(profile_data[date]['Elevation']))]
                LL_limit = LL_info[profile]
                # do a Neilson calculation to get the ZSA volume
                # if there aren't enough available points don't do it
                if len(elevation) > 2:
                    volume = neilson_volumes.volume_available(chainage, elevation, LL_limit)
                    if volume < 0:
                        volume = 0
                        print('%s %s has a negative volume available' % (profile, date))
                else:
                    volume = 0
                results_volume[profile][date] = volume
    # write an excel sheet which summarises the data
    workbook = xlsxwriter.Workbook(output_xlsx)
    worksheet = workbook.add_worksheet('Volumes')
    row = 0
    col = 0
    worksheet.write(row, col, 'Profile')
    for date in all_dates:
        col = col + 1
        worksheet.write(row, col, date)
    col = 0
    row = 1
    for prof in results_volume.keys():
        worksheet.write(row, col, prof)
        for date in all_dates:
            col = col + 1
            try:
                vol = results_volume[prof][date]
            except KeyError:
                print("error with profile %s on %s" % (prof, date))
                vol = None
            worksheet.write(row, col, vol)
        col = 0
        row = row + 1
    workbook.close()  # close the workbook so the xlsx is written to disk
    return results_volume
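# results_volume is a nested dict of {profile: {date: volume}}, e.g. (values illustrative):
#   {'AVOCA_1': {'20170528': 152.3, '20180120': 148.9}}
# The same numbers are written to the 'Volumes' sheet of output_xlsx, with profiles as rows
# and survey dates as columns.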
def remove_temp_files(directory):
    for f in os.listdir(directory):
        os.unlink(os.path.join(directory, f))
    return None
###############################################################################
########################### RUN CODE ##########################################
input_file = 'Parameter Files/las-manipulation-survey-2.xlsx'
params_file=pd.read_excel(input_file, sheet_name="PARAMS")
for i in range(0, len(params_file)): #0, len(params_file)
print("Starting to process %s" % params_file['Beach'][i])
beach=params_file['Beach'][i]
survey_date=params_file['SURVEY DATE'][i]
original_las=params_file['INPUT LAS'][i]
classified_las_dir=params_file['LAS CLASSIFIED FOLDER'][i]
shp_swash_dir=params_file['SHP SWASH FOLDER'][i]
crop_heatmap_poly=params_file['HEATMAP CROP POLY'][i]
output_las_dir=params_file['LAS OUTPUT FOLDER'][i]
    zone_MGA = params_file['ZONE MGA'][i]
    output_poly_dir = params_file['SHP RASTER FOLDER'][i]
    output_tif_dir = params_file['TIF OUTPUT FOLDER'][i]
    cp_csv = params_file['INPUT CSV'][i]
    profile_limit_file = params_file['PROFILE LIMIT FILE'][i]
    csv_output_dir = params_file['CSV OUTPUT FOLDER'][i]
    graph_loc = params_file['PNG OUTPUT FOLDER'][i]
    volume_output = params_file['CSV VOLUMES FOLDER'][i]
    tmp_dir = params_file['TMP FOLDER'][i]
    # Get base name of input las
    las_basename = os.path.splitext(os.path.basename(original_las))[0]
    # Get name of input point cloud
    input_las = os.path.join(classified_las_dir, las_basename + '.las')
    # Get name of swash cropping polygon
    crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')
    # Crop point cloud to swash boundary
    las_data = call_lastools('lasclip', input=input_las, output='-stdout',
                             args=['-poly', crop_swash_poly], verbose=False)
    # crop_las(input_las5, crop_swash_poly, final_las, path_2_lastools)
    # Apply sea-side clipping polygon
    las_data = call_lastools('lasclip', input=las_data, output='-stdout',
                             args=['-poly', crop_heatmap_poly], verbose=False)
    # crop_las(final_las, heatmap_crop_poly, heatmap_las, path_2_lastools)
    # Create clipping polygon for heatmap raster
    shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
    call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)
    # las_boundary(heatmap_las, output_poly_name, output_poly_dir, path_2_lastools, zone_MGA)
    # make a raster
    # make_raster(heatmap_las, output_raster, path_2_lastools, keep_only_ground=True)
    tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
    call_lastools('blast2dem', input=input_las, output=tif_name,
                  args=['-step', 0.2], verbose=False)
    # extract the points and get volumes
    # df = extract_pts(final_las, cp_csv, survey_date, beach, keep_only_ground=True)
    df = extract_pts(
        las_data,
        cp_csv,
        survey_date,
        beach,
        args=['-parse', 'sxyz', '-keep_class', '2'],
        verbose=False)
    update_survey_output(df, csv_output_dir)
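    # df is assumed to hold the per-point profile data (Beach, Profile, Date, Chainage,
    # Easting, Northing, Elevation) that update_survey_output writes to the csv output
    # folder and that CC_split_profile reads back in during the volume analysis below.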
    # colourise the point cloud
    # delete the temp files from the tmp_dir and the interim_dir
    remove_temp_files(tmp_dir)
    # remove_temp_files(int_dir)
    print("doing the volume analysis")
    test = profile_plots_volume(csv_output_dir, profile_limit_file, volume_output, graph_loc)