Update 'extract_pts' and create 'update_survey_output'

parent eeb3ec33ad
commit 3a9e035a27
@ -0,0 +1,97 @@
import os
import io
import subprocess

import pandas as pd
import matplotlib.pyplot as plt

survey_date = '20180517'
beach = 'Avoca'
output_csv_dir = 'csv'

las_in = 'C:/Users/z3161860/Downloads/LASTools/XXFiles/S2_Delivery/avoca_20180517.las'
cp_in = 'C:/Users/z3161860/Downloads/LASTools/XXFiles/CC_Profiles/Avoca_profiles.csv'

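# Note: the constants above (survey_date, beach, las_in, cp_in, output_csv_dir)
# appear intended to be edited for each new survey before re-running the script.
#
# Minimal sanity check (an addition, not part of the original workflow), based
# only on the requirement stated in the docstring below that lascontrol from
# LAStools be discoverable on the system PATH:
import shutil
if shutil.which('lascontrol') is None:
    print('Warning: lascontrol was not found on the system PATH')
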
def extract_pts(las_in, cp_in, survey_date, beach, keep_only_ground=True):
    """Extract elevations from a las surface based on x and y coordinates.

    Requires lastools in system path.

    Args:
        las_in: input point cloud (las)
        cp_in: point coordinates with columns: id, x, y, z (csv)
        survey_date: survey date string, e.g. '19700101'
        beach: beach name used to label the output rows (string)
        keep_only_ground: only keep points classified as 'ground' (boolean)

    Returns:
        Dataframe containing input coordinates with extracted elevations
    """
    cmd = ['lascontrol', '-i', las_in, '-cp', cp_in, '-parse', 'sxyz']

    if keep_only_ground:
        cmd += ['-keep_class', '2']

    # Call lastools
    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    errcode = process.returncode

    # Handle errors, if detected
    if errcode != 0:
        print("Error: lascontrol failed on {}".format(
            os.path.basename(las_in)))
        print(stderr.decode())

    # Load result into pandas dataframe
    df = pd.read_csv(io.BytesIO(stdout))

    # Tidy up dataframe
    df = df.drop(columns=['diff'])
    df['lidar_z'] = pd.to_numeric(df['lidar_z'], errors='coerce')
    df['Beach'] = beach
    df = df[[
        'Beach', 'ProfileNum', 'Easting', 'Northing', 'Chainage', 'lidar_z'
    ]]

    # Rename columns
    new_names = {
        'ProfileNum': 'Profile',
        'lidar_z': 'Elevation_{}'.format(survey_date),
    }
    df = df.rename(columns=new_names)

    return df

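# For reference, the subprocess call in extract_pts() is roughly equivalent to
# running lascontrol by hand, e.g. (with keep_only_ground=True):
#
#   lascontrol -i <las_in> -cp <cp_in> -parse sxyz -keep_class 2
#
# with the csv of control points, annotated with lidar elevations, written to
# stdout and parsed by pandas.
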
def update_survey_output(df, output_dir):
    """Update survey profile output csv files with current survey.

    Args:
        df: dataframe containing current survey elevations
        output_dir: directory where csv files are saved

    Returns:
        None
    """
    # Make sure the output directory exists
    os.makedirs(output_dir, exist_ok=True)

    # Column holding the current survey, e.g. 'Elevation_20180517'
    current_survey_col = df.columns[-1]

    # Merge current survey with existing data, one csv per profile
    profiles = df['Profile'].unique()
    for profile in profiles:
        csv_name = os.path.join(output_dir, '{}.csv'.format(profile))

        # Elevations for the current profile only
        profile_df = df[df['Profile'] == profile].reset_index(drop=True)

        try:
            # Load existing results
            master = pd.read_csv(csv_name)
        except FileNotFoundError:
            master = profile_df.copy()

        # Add (or update) current survey
        # (assumes rows are in the same chainage order as the existing csv)
        master[current_survey_col] = profile_df[current_survey_col].values

        # Export updated results
        master.to_csv(csv_name, index=False)

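# For reference (column layout only, inferred from how the dataframe is built
# above): each per-profile csv written by update_survey_output() gains one
# 'Elevation_<survey_date>' column per survey, alongside the fixed columns
# Beach, Profile, Easting, Northing and Chainage.
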
df = extract_pts(las_in, cp_in, survey_date, beach, keep_only_ground=True)
update_survey_output(df, output_csv_dir)