Compare commits
etta-drone...master (15 commits)
Author | SHA1 | Date |
---|---|---|
chrisd | e9b95d3a85 | 5 years ago |
chrisd | d22acb6e09 | 5 years ago |
chrisd | 5edc0b0aa6 | 5 years ago |
chrisd | 3e484f9f2f | 5 years ago |
Dan Howe | 7f046d84a5 | 5 years ago |
Dan Howe | 00791e5317 | 5 years ago |
Dan Howe | f6b2f8f38e | 5 years ago |
Dan Howe | e334ed888e | 5 years ago |
Dan Howe | cfba2b500a | 5 years ago |
Dan Howe | 793aad14c3 | 5 years ago |
Dan Howe | 4a31ae3f34 | 5 years ago |
chrisd | e59f3aedd1 | 5 years ago |
chrisd | 3220cf60ad | 5 years ago |
chrisd | 993296cef8 | 5 years ago |
chrisd | 82f22df890 | 5 years ago |
las_manipulation.py
@@ -1,459 +1,459 @@
"""las_manipulation.py

Clip, classify, and detect swash zone for an input las file.

Example usage:

# Process single survey at specific beach
python las_manipulation.py survey-1-avoca.yaml

# Process single survey at multiple beaches
python las_manipulation.py survey-1-avoca.yaml survey-1-pearl.yaml

# Process all surveys at specific beach
python las_manipulation.py *avoca.yaml

# Process all beaches for specific survey date
python las_manipulation.py survey-1*.yaml
"""

import os
import sys
import yaml
import argparse
import subprocess
from glob import glob
from tqdm import tqdm
import numpy as np
from scipy import interpolate
import pandas as pd
import geopandas as gpd
from shapely.geometry import Point, Polygon

from survey_tools import call_lastools

def remove_problems(x_list, y_list, z_list, x_now, y_now, check_value):
    z_ave = nine_pt_moving_average(z_list)
    deriv_ave, chainage = forward_derivative(x_list, y_list, z_ave)
    deriv_real, chainage = two_point_derivative(x_list, y_list, z_list)

    # First, find the reference contour on the beach
    #index_contour, x_now, y_now, distance = find_beach_reference_contour_choose_closest(chainage, z_ave, x_list, y_list, x_now, y_now, deriv_ave, check_value)
    index_contour, x_now, y_now, distance = find_beach_reference_contour(chainage, z_ave, x_list, y_list, x_now, y_now, deriv_ave, deriv_real, check_value)
    if index_contour < len(chainage):  # otherwise keep everything
        # Find the beach slope, the interpolated beach line, and the index of the reference contour +1
        beach_slope, beach_line, index_high = find_beach_slope(chainage, z_ave, index_contour, check_value)
        # Find the natural deviation of the lower beach
        nat_dev = get_natural_deviation(chainage, z_list, index_contour, index_high, beach_line)

        for i in range(index_contour, len(z_list)):
            if abs(z_list[i] - float(beach_line(chainage[i]))) > nat_dev:
                break
    else:
        i = index_contour

    z_return = z_list[0:i]
    chainage_return = chainage[0:i]
    return z_return, chainage_return, x_now, y_now, distance

def two_point_derivative(x_list, y_list, z_list):
    chain = [((x_list[0]-x_list[i])**2+(y_list[0]-y_list[i])**2)**0.5 for i in range(0, len(x_list))]
    deriv = [(z_list[i+1]-z_list[i-1])/(chain[i+1]-chain[i-1]) for i in range(1, len(z_list)-1)]

    deriv.insert(0, 0)
    deriv.append(0)

    return deriv, chain

def forward_derivative(x_list, y_list, z_list):
    chain = [((x_list[0]-x_list[i])**2+(y_list[0]-y_list[i])**2)**0.5 for i in range(0, len(x_list))]
    # Start at i=1: a negative index here would silently wrap around to the end of the list
    deriv = [(z_list[i]-z_list[i-1])/(chain[i]-chain[i-1]) for i in range(1, len(z_list))]

    deriv.insert(0, 0)

    return deriv, chain

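For orientation, a tiny worked example of the derivative helpers above, using three hypothetical points spaced 1 m apart along x (illustrative values only, assuming the functions are importable from las_manipulation):

from las_manipulation import two_point_derivative

# chainage is distance from the first point; derivatives are zero-padded at the ends
deriv, chain = two_point_derivative([0, 1, 2], [0, 0, 0], [0.0, 0.5, 2.0])
print(chain)  # [0.0, 1.0, 2.0]
print(deriv)  # [0, 1.0, 0] -- central difference at the single interior point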
def find_first_over_reference(z_list, value):
    i = len(z_list)-1

    while i > 0 and z_list[i] < value:
        i = i-1

    return i

def nine_pt_moving_average(z_list):
    i = 0
    move_ave = []
    while i < len(z_list):
        if i < 5:
            ave = np.mean([z_list[j] for j in range(0, i+5)])
        elif i > len(z_list)-5:
            ave = np.mean([z_list[j] for j in range(i-4, len(z_list))])
        else:
            ave = np.mean([z_list[j] for j in range(i-4, i+5)])

        move_ave.append(ave)
        i = i+1
    return move_ave

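The window above is asymmetric near the profile ends: it grows from 5 points at the edges to the full 9 points mid-profile. A quick sanity check with hypothetical values (assuming the function is importable from las_manipulation):

from las_manipulation import nine_pt_moving_average

vals = nine_pt_moving_average(list(range(10)))
print(vals[0], vals[1], vals[2])  # 2.0 2.5 3.0 -- means of 0..4, 0..5 and 0..6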
def find_neg_derivative(z_list, deriv_list):
    i = len(z_list)-5
    while z_list[i] >= 0 and z_list[i+1] >= 0 and z_list[i+2] >= 0 and z_list[i+3] >= 0 and z_list[i+4] >= 0:
        i = i-1

    return i

def find_beach_reference_contour_choose_closest(chain_list, z_ave_list, x_list, y_list, x_last, y_last, deriv_ave_list, check_value):
    # Note that z_ave_list should be the 9-point moving average
    # Assumes that beaches are shallow (|derivative| < 0.3), sloping, and between 0-4 m AHD
    i = 0
    choice_list = []
    distance_list = []
    if z_ave_list[i] > check_value:
        state_now = 'over'
    else:
        state_now = 'under'

    while i < len(z_ave_list):
        if state_now == 'under' and z_ave_list[i] > check_value:  # only keep if it is downward sloping
            state_now = 'over'
        elif state_now == 'over' and z_ave_list[i] < check_value:
            choice_list.append(i)
            state_now = 'under'
            if x_last is not None:
                distance_list.append(((x_last - x_list[i])**2+(y_last - y_list[i])**2)**0.5)
        i = i+1

    if len(choice_list) > 0 and x_last is None:  # choose the first crossing for the first point
        i = choice_list[0]
        distance = 0
    elif len(choice_list) > 0 and x_last is not None:
        assert len(choice_list) == len(distance_list)
        i = choice_list[distance_list.index(min(distance_list))]
        distance = min(distance_list)

    if i >= len(x_list):
        i = len(x_list)-1
        if x_last is not None:
            distance = ((x_last - x_list[i])**2+(y_last - y_list[i])**2)**0.5
        else:
            distance = 0

    x = x_list[i]
    y = y_list[i]

    return i, x, y, distance

def find_beach_reference_contour(chain_list, z_ave_list, x_list, y_list, x_last, y_last, deriv_ave_list, deriv_real_list, check_value):
    # Note that z_ave_list should be the 9-point moving average
    # Assumes that beaches are shallow (|derivative| < 0.3), sloping, and between 0-4 m AHD

    i = len(z_ave_list)-1
    # Beaches are shallow sloping and low
    while i >= 0 and (z_ave_list[i] > check_value+2 or z_ave_list[i] < check_value-2 or deriv_ave_list[i] > 0 or max([abs(d) for d in deriv_real_list[max(0, i-7):i]]+[0]) > 0.3):
        i = i-1

    # Find the first time it gets to check_value after this
    while i >= 0 and z_ave_list[i] < check_value:
        i = i-1

    if i <= 0:  # <= also catches the loops exiting at i == -1
        i = len(z_ave_list)-1  # the whole thing is above the beach

    if x_last is not None:
        distance = ((x_last - x_list[i])**2+(y_last - y_list[i])**2)**0.5
    else:
        distance = 0
    x = x_list[i]
    y = y_list[i]

    return i, x, y, distance

def find_beach_slope(chain_list, z_ave_list, ref_index, check_value):
    # ref_index is the index of the check value
    # Find the beach slope between this point and 1 m above this point

    i = ref_index
    while i > 0 and z_ave_list[i] < check_value+1:
        i = i-1

    # Hide numpy floating point arithmetic warnings
    np.seterr(all='ignore')

    slope = (z_ave_list[i]-z_ave_list[ref_index])/(chain_list[i]-chain_list[ref_index])

    # Restore numpy floating point arithmetic warnings
    # (seterr(all=None) leaves settings unchanged; 'warn' is the numpy default)
    np.seterr(all='warn')

    beach_ave = interpolate.interp1d([min(chain_list), max(chain_list)], [(min(chain_list)-chain_list[ref_index])*slope+z_ave_list[ref_index], (z_ave_list[ref_index]-(chain_list[ref_index]-max(chain_list))*slope)])

    return slope, beach_ave, i

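The interp1d call above compresses a lot into one line: it builds a straight "beach line" that passes through the reference point at the fitted slope and spans the full chainage range. An equivalent, more explicit construction is sketched below (same inputs and result; the helper name is ours, not the repository's):

from scipy import interpolate

def make_beach_line(slope, ch_ref, z_ref, chain_list):
    # Evaluate the fitted line at the two ends of the chainage range
    ch = [min(chain_list), max(chain_list)]
    z = [z_ref + slope * (c - ch_ref) for c in ch]
    return interpolate.interp1d(ch, z)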
def get_natural_deviation(chain_list, z_list, ref_index, ref_high, beach_ave):
    # For the points considered to be on the beach (reference contour to
    # reference contour +1), find the average natural deviation
    deviation = []
    for i in range(ref_high, ref_index+1):
        dev_tmp = abs(z_list[i] - float(beach_ave(chain_list[i])))
        deviation.append(dev_tmp)

    natural_deviation = min(np.max(deviation), 0.4)  # THIS MAY BE TOO CONSERVATIVE

    return natural_deviation

def distance_point_to_poly(x_list, y_list, x_now, y_now):
    # Distance from the current point to the end of the x_list/y_list line
    end = Point(x_list[-1], y_list[-1])
    point = Point(x_now, y_now)

    dist = point.distance(end)

    return dist

def polygon_wave_runup(xyz_1m, direction, shp_name, set_check_value, distance_check, zone):
    #print('starting processing of wave runup')

    all_data = pd.read_csv(xyz_1m, header=None, names=['X', 'Y', 'Z'])

    if direction == 'north_south':
        all_data_sorted = all_data.sort_values(by=['X', 'Y'], ascending=[1, 0])
    elif direction == 'west_east':
        all_data_sorted = all_data.sort_values(by=['Y', 'X'], ascending=[0, 1])

    fixed_now = 0
    a = 0
    X_tmp = []
    Y_tmp = []  # initialised alongside X_tmp so the first iteration cannot hit an undefined name
    Z_tmp = []
    processed_data = pd.DataFrame(columns=['X', 'Y', 'Z'])
    list_to_print = [10, 20, 30, 40, 50, 60, 70, 80, 90]
    crop_line = []
    top_line = []
    tmp_x_last = None
    tmp_y_last = None
    exceed_list = []

    # Create progress bar
    pbar = tqdm(all_data_sorted.iterrows(), total=all_data_sorted.shape[0])
    for index, line in pbar:
        a = a+1
        percent_done = round(a/len(all_data_sorted)*100, 1)
        if percent_done in list_to_print:
            #print("Finished %s%% of the processing" % percent_done)
            list_to_print = list_to_print[1:len(list_to_print)]
        if direction == 'north_south':
            check_this = line['X']
        elif direction == 'west_east':
            check_this = line['Y']
        if check_this == fixed_now:
            X_tmp.append(line['X'])
            Y_tmp.append(line['Y'])
            Z_tmp.append(line['Z'])
        else:
            if len(X_tmp) != 0:
                # TODO: consider wrapping this call in try/except
                if len(X_tmp) > 10:
                    Z_set, chainage_tmp, temp_x, temp_y, distance = remove_problems(X_tmp, Y_tmp, Z_tmp, tmp_x_last, tmp_y_last, set_check_value)
                else:
                    Z_set = Z_tmp
                    temp_x = X_tmp[len(Z_set)-1]
                    temp_y = Y_tmp[len(Z_set)-1]
                    distance = 0

                distance_2_old = distance_point_to_poly(X_tmp, Y_tmp, temp_x, temp_y)
                # TODO: change this so it checks the distance from the first crop polygon, concave_now.buffer(buffer)
                if distance_2_old < distance_check:
                    tmp_x_last = temp_x
                    tmp_y_last = temp_y
                    crop_line.append([X_tmp[len(Z_set)-1], Y_tmp[len(Z_set)-1]])
                    top_line.append([X_tmp[0], Y_tmp[0]])
                # Otherwise crop by the distance_check
                else:
                    exceed_list.append(1)
                    try:
                        tmp_x_last = X_tmp[len(X_tmp)-distance_check]  # because this is a 1 m DSM, this works
                        tmp_y_last = Y_tmp[len(Y_tmp)-distance_check]

                        crop_line.append([tmp_x_last, tmp_y_last])
                        top_line.append([X_tmp[0], Y_tmp[0]])
                    except IndexError:
                        print('problem with the last crop point, keeping whole line')
                        crop_line.append([X_tmp[-1], Y_tmp[-1]])
                        top_line.append([X_tmp[0], Y_tmp[0]])

                if direction == 'north_south':
                    fixed_now = line['X']
                elif direction == 'west_east':
                    fixed_now = line['Y']
                X_tmp = [line['X']]
                Y_tmp = [line['Y']]
                Z_tmp = [line['Z']]

            else:
                if direction == 'north_south':
                    fixed_now = line['X']
                elif direction == 'west_east':
                    fixed_now = line['Y']
                X_tmp = [line['X']]
                Y_tmp = [line['Y']]
                Z_tmp = [line['Z']]

    # For the last line
    derivative, chainage = forward_derivative(X_tmp, Y_tmp, Z_tmp)
    if len(X_tmp) > 10:
        Z_set, chainage_tmp, temp_x, temp_y, distance = remove_problems(X_tmp, Y_tmp, Z_tmp, tmp_x_last, tmp_y_last, set_check_value)
    else:
        Z_set = Z_tmp
        temp_x = X_tmp[len(Z_set)-1]
        temp_y = Y_tmp[len(Z_set)-1]
        distance = 0
    X_set = X_tmp[0:len(Z_set)]
    Y_set = Y_tmp[0:len(Z_set)]

    # Write to new data frame
    #if len(Z_set) > 0:
    #    for i in range(0, len(Z_set)):
    #        processed_data = processed_data.append({'X': X_set[i], 'Y': Y_set[i], 'Z': Z_set[i], 'r': r_set[i], 'g': g_set[i], 'b': b_set[i]}, ignore_index=True)

    # Add to crop line
    distance_2_old = distance_point_to_poly(X_tmp, Y_tmp, temp_x, temp_y)
    if distance_2_old < distance_check:
        tmp_x_last = temp_x
        tmp_y_last = temp_y
        crop_line.append([X_tmp[len(Z_set)-1], Y_tmp[len(Z_set)-1]])
        top_line.append([X_tmp[0], Y_tmp[0]])
    # Otherwise crop by the distance_check
    else:
        exceed_list.append(1)
        tmp_x_last = X_tmp[len(X_tmp)-distance_check]
        tmp_y_last = Y_tmp[len(Y_tmp)-distance_check]
        crop_line.append([tmp_x_last, tmp_y_last])  # was crop_line.append(tmp_x_last, tmp_y_last): list.append takes a single argument
        top_line.append([X_tmp[0], Y_tmp[0]])

    # Smooth the crop line; otherwise a straight line is better
    if direction == 'north_south':
        y_filtered = nine_pt_moving_average([i[1] for i in crop_line])
        crop_new = [[crop_line[i][0], y_filtered[i]] for i in range(0, len(crop_line))]
    elif direction == 'west_east':
        x_filtered = nine_pt_moving_average([i[0] for i in crop_line])
        crop_new = [[x_filtered[i], crop_line[i][1]] for i in range(0, len(crop_line))]

    for_shape = crop_new + top_line[::-1]
    for_shape.append(crop_new[0])
    #print('exceeded the manual distance_check %s%% of the time. manual cropping undertaken' % (round(len(exceed_list)/a, 2)*100))

    # Make the cropping shapefile
    #print('making the crop polygon')

    # Simplify polygon to remove invalid geometry
    #geom = Polygon(for_shape).simplify(10)
    geom = Polygon(for_shape)

    # Export polygon as shapefile
    df = gpd.GeoDataFrame(geometry=[geom])
    df.crs = {'init': 'epsg:283{}'.format(zone), 'no_defs': True}
    df.to_file(shp_name, driver='ESRI Shapefile')

    return None

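A note on the CRS assignment at the end of polygon_wave_runup(): the {'init': ...} dict is legacy pyproj syntax, and EPSG:283xx are the GDA94 / MGA zone codes (e.g. zones 54-56 are EPSG:28354-28356). Under geopandas >= 0.7 the same intent is normally written as a plain EPSG string; a minimal sketch, with a hypothetical zone:

import geopandas as gpd
from shapely.geometry import Polygon

zone = 56  # hypothetical MGA zone
df = gpd.GeoDataFrame(geometry=[Polygon([(0, 0), (1, 0), (1, 1)])],
                      crs='EPSG:283{}'.format(zone))  # GDA94 / MGA zone 56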
def remove_temp_files(directory):
    for f in os.listdir(directory):
        os.unlink(os.path.join(directory, f))

    return None

def process(yaml_file):
    with open(yaml_file, 'r') as f:
        params = yaml.safe_load(f)

    print("Starting to process %s" % params['BEACH'])
    input_las = params['INPUT LAS']
    initial_crop_poly = params['INITIAL CROP POLY']
    lasground_step = params['LASGROUND STEP']
    zone_MGA = params['ZONE MGA']
    check_value = params['CHECK VALUE']
    direct = params['DIRECTION']
    check_distance = params['CHECK DISTANCE']
    las_dir = params['LAS CLASSIFIED FOLDER']
    shp_dir = params['SHP SWASH FOLDER']
    tmp_dir = params['TMP FOLDER']
    survey_date = params['SURVEY DATE']
    beach = params['BEACH']

    # Get base name of input las
    #las_basename = os.path.splitext(os.path.basename(input_las))[0]
    las_basename = '%s_%s' % (beach.lower().replace(" ", "_"), survey_date)

    # Crop to beach boundary
    print('Clipping...')
    las_clipped_name = os.path.join(tmp_dir, las_basename + '_clipped.las')
    call_lastools('lasclip', input=input_las, output=las_clipped_name,
                  args=['-poly', initial_crop_poly], verbose=False)

    # Classify ground points
    print('Classifying ground...')
    las_classified_name = os.path.join(las_dir, las_basename + '.las')
    call_lastools('lasground_new', input=las_clipped_name, output=las_classified_name,
                  args=['-step', lasground_step], verbose=False)

    # Interpolate point cloud onto a grid
    print('Interpolating to grid...')
    xyz_name = os.path.join(tmp_dir, las_basename + '.xyz')
-   call_lastools('las2dem', input=las_classified_name, output=xyz_name,
+   call_lastools('blast2dem', input=las_classified_name, output=xyz_name,
                  args=['-step', 1], verbose=False)

    # Make runup clipping mask from gridded point cloud
    print('Calculating runup clipping mask...')
    shp_name = os.path.join(shp_dir, las_basename + '.shp')
    polygon_wave_runup(xyz_name, direct, shp_name, check_value, check_distance, zone_MGA)
    # NOTE: check the output shp file and adjust as required

    # Delete the temp files
    remove_temp_files(tmp_dir)

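process() assumes the survey YAML supplies at least the keys read above. The sketch below writes a minimal config with those keys; every value is a placeholder invented for illustration, not taken from the repository:

import yaml

params = {
    'BEACH': 'Avoca',                     # placeholder
    'SURVEY DATE': 20200219,              # placeholder
    'INPUT LAS': 'input/avoca.las',       # placeholder paths throughout
    'INITIAL CROP POLY': 'shp/avoca_boundary.shp',
    'LASGROUND STEP': 10,
    'ZONE MGA': 56,
    'CHECK VALUE': 2,                     # reference contour elevation (m AHD)
    'DIRECTION': 'north_south',           # or 'west_east'
    'CHECK DISTANCE': 5,
    'LAS CLASSIFIED FOLDER': 'las_classified',
    'SHP SWASH FOLDER': 'shp_swash',
    'TMP FOLDER': 'tmp',
}

with open('survey-1-avoca.yaml', 'w') as f:
    yaml.safe_dump(params, f)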
def main():
    example_text = """examples:

# Process single survey at specific beach
python las_manipulation.py survey-1-avoca.yaml

# Process single survey at multiple beaches
python las_manipulation.py survey-1-avoca.yaml survey-1-pearl.yaml

# Process all surveys at specific beach
python las_manipulation.py *avoca.yaml

# Process all beaches for specific survey date
python las_manipulation.py survey-1*.yaml
"""

    # Set up command line arguments
    parser = argparse.ArgumentParser(
        epilog=example_text,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('input', help='path to yaml file(s)', nargs='*')

    # Print usage if no arguments are provided
    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    # Parse arguments
    args = parser.parse_args()
    yaml_files = []
    [yaml_files.extend(glob(f)) for f in args.input]

    for yaml_file in yaml_files:
        process(yaml_file)


if __name__ == '__main__':
    main()

las_outputs.py
@@ -1,341 +1,341 @@
"""las_outputs.py

Crop swash zone, plot survey profiles, and complete a volume analysis based
on the output from `las_manipulation.py`.

Example usage:

# Process single survey at specific beach
python las_outputs.py survey-1-avoca.yaml

# Process single survey at multiple beaches
python las_outputs.py survey-1-avoca.yaml survey-1-pearl.yaml

# Process all surveys at specific beach
python las_outputs.py *avoca.yaml

# Process all beaches for specific survey date
python las_outputs.py survey-1*.yaml
"""

import os
import io
import re
import sys
import math
import yaml
import argparse
import datetime
import subprocess
import numpy as np
import pandas as pd
from glob import glob
from cycler import cycler
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator

import nielsen_volumes
from survey_tools import call_lastools, extract_pts, update_survey_output

def get_datestring(x):
    """Format a date integer into an ISO-8601 date string

    Args:
        x: unformatted date

    Returns:
        formatted date string

    Examples:
        >>> get_datestring(19700101)
        '1970-01-01'
    """
    datestr = pd.datetime.strptime(str(x), '%Y%m%d').strftime('%Y-%m-%d')

    return datestr

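A portability caveat for get_datestring(): pd.datetime was deprecated in pandas 1.x and removed in 2.0. On a current stack the same conversion goes through the standard library; an equivalent sketch under that assumption:

import datetime

def get_datestring_modern(x):
    # Same behaviour as get_datestring(), without the pandas alias
    return datetime.datetime.strptime(str(x), '%Y%m%d').strftime('%Y-%m-%d')

assert get_datestring_modern(19700101) == '1970-01-01'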
def remove_temp_files(directory):
    for f in os.listdir(directory):
        os.unlink(os.path.join(directory, f))

    return None

def plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits, delta_vol, survey_date, scale_figures=False):
    csv_name = profile_name + '.csv'
    profiles = pd.read_csv(os.path.join(csv_output_dir, csv_name))

    # Remove metadata, and extract profile coordinates
    profiles = profiles.loc[:, 'Chainage':].set_index('Chainage')

    # Determine if current survey is the latest
    current_survey_col = 'Elevation_' + survey_date
    is_latest = profiles.columns[-1] == current_survey_col

    # Only plot profiles up to current survey date
    profiles = profiles.loc[:, :current_survey_col]

    # Find landward limit of profile (behind beach)
    ch_min = ch_limits.loc[profile_name, 'Landward Limit']

    # Set figure dimensions based on beach size
    vertical_exag = 5
    m_per_inch = 8
    try:
        fig_h = profiles.dropna().values.max() / m_per_inch * vertical_exag
        fig_w = (profiles.index.max() - ch_min) / m_per_inch
    except ValueError:
-       fig_h = 5
+       fig_h = 2.3
        fig_w = 10

    if scale_figures:
        fig, ax = plt.subplots(figsize=(fig_w, fig_h))
        ax.set_aspect(vertical_exag)
    else:
-       fig, ax = plt.subplots(figsize=(10, 5))
+       fig, ax = plt.subplots(figsize=(10, 2.3))

    for col in profiles.columns:
        profile = profiles.loc[:, col]
        date_str = col.split('_')[-1]
        date = get_datestring(date_str)

        ax.plot(profile.index, profile, label=date)

    # Show landward limit of volume calculations
    ax.axvline(x=ch_min, color='#222222', linestyle='--', label='Landward limit')

    ax.set_title(profile_name)
    ax.set_xlabel('Chainage (m)', labelpad=10)
    ax.set_ylabel('Elevation (m AHD)', labelpad=10)

    Ylim = ax.get_ylim()[1]

    if Ylim < 10:
        ax.set_ylim([ax.get_ylim()[0], 10])

    # Remove empty space at left of figure
    try:
        ax.set_xlim(left=profile.first_valid_index() - 10)
    except TypeError:
        pass

    # Show most recent volume change
    if delta_vol is not None:
        ax.annotate('Most recent\nvolume change:\n{:+0.1f} m$^3$/m'.format(delta_vol),
                    (0.05, 0.15), xycoords='axes fraction', fontsize=9,
                    backgroundcolor='#ffffff', linespacing=1.5)

-   ax.legend(edgecolor='none', facecolor='#ffffff', fontsize=9, labelspacing=1)
+   ax.legend(edgecolor='none', facecolor='#ffffff', fontsize=9, labelspacing=1,
+             loc='lower left', bbox_to_anchor=(1.02, 0))

    ax.xaxis.set_minor_locator(MultipleLocator(5))
    ax.yaxis.set_minor_locator(MultipleLocator(0.5))
    ax.xaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)
    ax.yaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)

    # Save in folder with current date
    png_dirs = [os.path.join(graph_loc, get_datestring(survey_date))]
    if is_latest:
        # Save copy in 'latest' if survey is most recent
        png_dirs += [os.path.join(graph_loc, 'latest')]

    for png_dir in png_dirs:
        # Prepare output directory
        try:
            os.makedirs(png_dir)
        except FileExistsError:
            pass

        png_name = os.path.join(png_dir, profile_name + '.png')
        plt.savefig(png_name, bbox_inches='tight', dpi=300)
    # Close after all copies are saved; closing inside the loop would leave
    # the second savefig with a new, empty figure
    plt.close()

def calculate_volumes(profile_name, current_survey_date, previous_survey_date,
                      csv_output_dir, ch_limits, volume_output_dir):
    csv_prof = profile_name + '.csv'
    beach = re.search(r'.*(?=_\d)', profile_name).group()
    profiles = pd.read_csv(os.path.join(csv_output_dir, csv_prof))

    # Remove metadata, and extract profile coordinates
    profiles = profiles.loc[:, 'Chainage':].set_index('Chainage')

    # Find landward limit of profile (behind beach)
    ch_min = ch_limits.loc[profile_name, 'Landward Limit']

    # Open volume spreadsheet
    csv_vol = os.path.join(volume_output_dir, 'volumes.csv')
    try:
        volumes = pd.read_csv(csv_vol, index_col=0)
    except FileNotFoundError:
        # Create new dataframe if csv does not exist
        volumes = pd.DataFrame()

    # Get Nielsen erosion volumes for current survey date
    current_survey = 'Elevation_' + current_survey_date
    chainage = profiles.loc[:, current_survey].dropna().index
    elevation = profiles.loc[:, current_survey].dropna().values
    try:
        volume = nielsen_volumes.volume_available(chainage, elevation, ch_min)
    except ValueError:
        volume = np.nan

    # Update spreadsheet
    volumes.loc[profile_name, 'Volume_' + current_survey_date] = volume

    # Save updated volumes spreadsheet
    volumes = volumes[volumes.columns.sort_values()]
    volumes = volumes.sort_index()
    volumes.to_csv(csv_vol)

    # Get most recent volume difference for current profile
    current_date_idx = volumes.columns.get_loc('Volume_' + current_survey_date)
    previous_date_idx = volumes.columns.get_loc('Volume_' + previous_survey_date)

    if previous_date_idx < 0:
        # Return None if there is only one survey
        delta_vol = None
    else:
        previous_vol = volumes.loc[profile_name][previous_date_idx]
        current_vol = volumes.loc[profile_name][current_date_idx]
        delta_vol = current_vol - previous_vol

    return delta_vol

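A caveat on the previous_date_idx guard above: pandas' Index.get_loc raises KeyError for a missing label rather than returning a negative index, so `previous_date_idx < 0` can never fire. A defensive variant of that block is sketched below, reusing the function's local names (not a drop-in from the repository):

prev_col = 'Volume_' + previous_survey_date
if prev_col not in volumes.columns:
    # Only one survey available for this profile
    delta_vol = None
else:
    delta_vol = (volumes.loc[profile_name, 'Volume_' + current_survey_date]
                 - volumes.loc[profile_name, prev_col])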
def process(yaml_file):
    with open(yaml_file, 'r') as f:
        params = yaml.safe_load(f)

    print("Starting to process %s" % params['BEACH'])
    beach = params['BEACH']
    survey_date = str(params['SURVEY DATE'])
    survey_date_previous = str(params['PREVIOUS SURVEY DATE'])
    original_las = params['INPUT LAS']
    classified_las_dir = params['LAS CLASSIFIED FOLDER']
    shp_swash_dir = params['SHP SWASH FOLDER']
    crop_heatmap_poly = params['HEATMAP CROP POLY']
    output_las_dir = params['LAS OUTPUT FOLDER']
    zone_MGA = params['ZONE MGA']
    output_poly_dir = params['SHP RASTER FOLDER']
    output_tif_dir = params['TIF DEM FOLDER']
    cp_csv = params['INPUT CSV']
    profile_limit_file = params['PROFILE LIMIT FILE']
    csv_output_dir = params['CSV OUTPUT FOLDER']
    graph_loc = params['PNG OUTPUT FOLDER']
    volume_output_dir = params['CSV VOLUMES FOLDER']
    tmp_dir = params['TMP FOLDER']

    # Get base name of input las
    #las_basename = os.path.splitext(os.path.basename(original_las))[0]
    las_basename = '%s_%s' % (beach.lower().replace(" ", "_"), survey_date)

    # Get name of input point cloud
    input_las = os.path.join(classified_las_dir, las_basename + '.las')

    # Get name of swash cropping polygon
    crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')

    # Crop point cloud to swash boundary
    print('Cropping swash...')
    las_data = call_lastools('lasclip', input=input_las, output='-stdout',
                             args=['-poly', crop_swash_poly], verbose=False)

    # Export classified, clipped las for delivery to client
    las_name = os.path.join(output_las_dir, las_basename + '.las')
    with open(las_name, 'wb') as f:
        f.write(las_data)

    # Apply sea-side clipping polygon
    print('Cropping back of beach...')
    las_data = call_lastools('lasclip', input=las_data, output='-stdout',
                             args=['-poly', crop_heatmap_poly], verbose=False)

    # Create clipping polygon for heatmap raster
    print('Creating heat map cropping polygon...')
    shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
    call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)

    # Make a raster from point cloud
    print('Creating heat map raster...')
-   tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
+   tif_name = os.path.join(output_tif_dir, las_basename + '_DEM.tif')
    call_lastools('las2dem', input=las_data, output=tif_name,
                  args=['-step', 1, '-keep_class', 2], verbose=False)
    # IF THIS STEP ISN'T WORKING:
    # it might mean there are no data lines;
    # try running with args=['-step', 1, '-keep_class', 2, '-rescale', 0.001, 0.001, 0.001]
    #call_lastools('las2dem', input=las_data, output=tif_name,
    #              args=['-step', 1, '-keep_class', 2, '-rescale', 0.001, 0.001, 0.001], verbose=False)

    # Extract elevations along profiles from triangulated surface
    print('Extracting profile elevations...')
    df = extract_pts(
        las_data,
        cp_csv,
        survey_date,
        beach,
        args=['-parse', 'sxyz', '-keep_class', '2'],
        verbose=False)

    # Update survey profiles
    update_survey_output(df, csv_output_dir)

    # Get landward limit of surveys
    ch_limits = pd.read_excel(profile_limit_file, index_col='Profile')

    # Plot profiles, and save sand volumes for current beach
    print('Updating figures...')
    profile_names = df['Profile'].unique()
    for profile_name in profile_names:
        delta_vol = calculate_volumes(profile_name, survey_date, survey_date_previous,
                                      csv_output_dir, ch_limits,
                                      volume_output_dir)
        plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits,
                      delta_vol, survey_date)

    # Remove temporary files
    remove_temp_files(tmp_dir)

def main():
    example_text = """examples:

# Process single survey at specific beach
python las_outputs.py survey-1-avoca.yaml

# Process single survey at multiple beaches
python las_outputs.py survey-1-avoca.yaml survey-1-pearl.yaml

# Process all surveys at specific beach
python las_outputs.py *avoca.yaml

# Process all beaches for specific survey date
python las_outputs.py survey-1*.yaml
"""

    # Set up command line arguments
    parser = argparse.ArgumentParser(
        epilog=example_text,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('input', help='path to yaml file(s)', nargs='*')

    # Print usage if no arguments are provided
    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    # Parse arguments
    args = parser.parse_args()
    yaml_files = []
    [yaml_files.extend(glob(f)) for f in args.input]

    for yaml_file in yaml_files:
        process(yaml_file)


if __name__ == '__main__':
    main()

polyline_to_points.py (new file)
@@ -0,0 +1,92 @@
"""polyline_to_points.py
Extract interpolated points along transects in a shapefile.

D. Howe
d.howe@wrl.unsw.edu.au
2020-02-19
"""
import sys
import argparse
import numpy as np
import pandas as pd
import geopandas as gpd
from shapely.geometry import LineString


def extract(shp_path, spacing=1, field=None):
    rows = []
    shp = gpd.read_file(shp_path)

    if field is None:
        # Assume profile name is second field in shapefile
        field = shp.columns[1]

    for i, line in shp.iterrows():
        g = line.geometry
        chainages = np.arange(0, g.length, step=spacing)
        for chainage in chainages:
            easting, northing = g.interpolate(chainage).xy

            row = {
                'ProfileNum': line[field],
                'Easting': easting[0],
                'Northing': northing[0],
                'Chainage': chainage,
            }
            rows.append(row)

    # Create output dataframe
    df = pd.DataFrame(rows)

    # Re-order columns
    df = df[['ProfileNum', 'Easting', 'Northing', 'Chainage']]

    # Export to csv
    csv_path = shp_path.replace('.shp', '.csv')
    df.to_csv(csv_path, index=False)

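extract() can also be called directly from Python, mirroring the CLI defined in main() below; the shapefile and field names here are placeholders:

from polyline_to_points import extract

# Writes transects.csv next to the shapefile, with columns
# ProfileNum, Easting, Northing, Chainage
extract('transects.shp', spacing=5, field='ProfileID')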
def main():
    example_text = """examples:
# Extract points at default spacing (1m)
$ python polyline_to_points.py path/to/shp

# Extract points at 5m increments
$ python polyline_to_points.py -s 5 path/to/shp

# Use profile names from field "ProfileID" in the attribute table
$ python polyline_to_points.py -f ProfileID path/to/shp
"""
    # Set up command line arguments
    parser = argparse.ArgumentParser(
        epilog=example_text,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('shp_path',
                        metavar='SHAPEFILE',
                        help='path to input shapefile')
    parser.add_argument('-s',
                        '--spacing',
                        metavar='SPACING',
                        default=1,
                        type=int,
                        help='space between points (default=1)')
    parser.add_argument('-f',
                        '--field',
                        metavar='FIELDNAME',
                        help='profile field name in attribute table')

    # Print usage if no arguments are provided
    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    # Parse arguments
    args = parser.parse_args()

    # Extract points along the transects
    extract(**vars(args))


if __name__ == '__main__':
    main()