From 83615467cbcf503b61082db23b07ef7bb23a60e3 Mon Sep 17 00:00:00 2001
From: Dan Howe
Date: Tue, 24 Jul 2018 14:27:37 +1000
Subject: [PATCH] Use yaml files for loading input parameters

---
 las_manipulation.py | 131 +++++++++++++++++-------
 las_outputs.py      | 193 ++++++++++++++++++++++++--------------------
 2 files changed, 182 insertions(+), 142 deletions(-)

diff --git a/las_manipulation.py b/las_manipulation.py
index c9e7a2d..bd37188 100644
--- a/las_manipulation.py
+++ b/las_manipulation.py
@@ -1,7 +1,9 @@
 import os
 import sys
+import yaml
 import argparse
 import subprocess
+from glob import glob
 from tqdm import tqdm
 import numpy as np
 from scipy import interpolate
@@ -337,76 +339,95 @@ def polygon_wave_runup(xyz_1m, direction, shp_name, set_check_value, distance_ch
 
     return None
 
+
 def remove_temp_files(directory):
     for f in os.listdir(directory):
         os.unlink(os.path.join(directory, f))
 
     return None
 
+
+def process(yaml_file):
+    with open(yaml_file, 'r') as f:
+        params = yaml.safe_load(f)
+
+    print("Starting to process %s" % params['BEACH'])
+    input_las = params['INPUT LAS']
+    initial_crop_poly = params['INITIAL CROP POLY']
+    lasground_step = params['LASGROUND STEP']
+    zone_MGA = params['ZONE MGA']
+    check_value = params['CHECK VALUE']
+    direct = params['DIRECTION']
+    check_distance = params['CHECK DISTANCE']
+    las_dir = params['LAS CLASSIFIED FOLDER']
+    shp_dir = params['SHP SWASH FOLDER']
+    tmp_dir = params['TMP FOLDER']
+
+    # Get base name of input las
+    las_basename = os.path.splitext(os.path.basename(input_las))[0]
+
+    # Crop to beach boundary
+    print('Clipping...')
+    las_data = call_lastools('lasclip', input=input_las, output='-stdout',
+                             args=['-poly', initial_crop_poly], verbose=False)
+
+    # Classify ground points
+    print('Classifying ground...')
+    las_data = call_lastools('lasground_new', input=las_data, output='-stdout',
+                             args=['-step', lasground_step], verbose=False)
+
+    # Save classified point cloud
+    las_name = os.path.join(las_dir, las_basename + '.las')
+    with open (las_name, 'wb') as f:
+        f.write(las_data)
+
+    # Interpolate point cloud onto a grid
+    print('Interpolating to grid...')
+    xyz_name = os.path.join(tmp_dir, las_basename + '.xyz')
+    call_lastools('las2dem', input=las_data, output=xyz_name,
+                  args=['-step', 1], verbose=False)
+
+    # Make runup clipping mask from gridded point cloud
+    print('Calculating runup clipping mask...')
+    shp_name = os.path.join(shp_dir, las_basename + '.shp')
+    polygon_wave_runup(xyz_name, direct, shp_name, check_value, check_distance, zone_MGA)
+    #NOTE THAT YOU NEED TO CHECK THE OUTPUT SHP FILE AND ADJUST AS REQUIRED
+
+    #delete the temp files
+    remove_temp_files(tmp_dir)
+
+
 def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        'input_file',
-        metavar='PARAMS_FILE',
-        help='name of parameter file',
-        default=None)
+    example_text = """examples:
+
+    # Process single survey at specific beach
+    python las_manipulation.py survey-1-avoca.yaml
+
+    # Process all surveys at specific beach
+    python las_manipulation.py *avoca.yaml
+
+    # Process all beaches for specific survey date
+    python las_manipulation.py survey-1*.yaml
+    """
+
+    # Set up command line arguments
+    parser = argparse.ArgumentParser(
+        epilog=example_text,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument('input', help='path to yaml file(s)', nargs='*')
 
     # Print usage if no arguments are provided
    if len(sys.argv) == 1:
         parser.print_help(sys.stderr)
         sys.exit(1)
 
+    # Parse arguments
     args = parser.parse_args()
+    yaml_files = []
+    [yaml_files.extend(glob(f)) for f in args.input]
 
-    # read the parameters file and scroll through it
-    input_file = args.input_file
-    params_file=pd.read_excel(input_file, sheet_name="PARAMS")
-
-    for i, row in params_file.iterrows():
-        print("Starting to process %s" % row['BEACH'])
-        input_las = row['INPUT LAS']
-        initial_crop_poly = row['INITIAL CROP POLY']
-        lasground_step = row['LASGROUND STEP']
-        zone_MGA = row['ZONE MGA']
-        check_value = row['CHECK VALUE']
-        direct = row['DIRECTION']
-        check_distance = row['CHECK DISTANCE']
-        las_dir = row['LAS CLASSIFIED FOLDER']
-        shp_dir = row['SHP SWASH FOLDER']
-        tmp_dir = row['TMP FOLDER']
-
-        # Get base name of input las
-        las_basename = os.path.splitext(os.path.basename(input_las))[0]
-
-        # Crop to beach boundary
-        print('Clipping...')
-        las_data = call_lastools('lasclip', input=input_las, output='-stdout',
-                                 args=['-poly', initial_crop_poly], verbose=False)
-
-        # Classify ground points
-        print('Classifying ground...')
-        las_data = call_lastools('lasground_new', input=las_data, output='-stdout',
-                                 args=['-step', lasground_step], verbose=False)
-
-        # Save classified point cloud
-        las_name = os.path.join(las_dir, las_basename + '.las')
-        with open (las_name, 'wb') as f:
-            f.write(las_data)
-
-        # Interpolate point cloud onto a grid
-        print('Interpolating to grid...')
-        xyz_name = os.path.join(tmp_dir, las_basename + '.xyz')
-        call_lastools('las2dem', input=las_data, output=xyz_name,
-                      args=['-step', 1], verbose=False)
-
-        # Make runup clipping mask from gridded point cloud
-        print('Calculating runup clipping mask...')
-        shp_name = os.path.join(shp_dir, las_basename + '.shp')
-        polygon_wave_runup(xyz_name, direct, shp_name, check_value, check_distance, zone_MGA)
-        #NOTE THAT YOU NEED TO CHECK THE OUTPUT SHP FILE AND ADJUST AS REQUIRED
-
-        #delete the temp files
-        remove_temp_files(tmp_dir)
+    for yaml_file in yaml_files:
+        process(yaml_file)
 
 if __name__ == '__main__':
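With this change, las_manipulation.py reads one YAML parameter file per survey instead of the PARAMS sheet of an Excel workbook, and glob patterns on the command line select which files to process. A minimal sketch of such a file is shown below, using the survey-1-avoca.yaml name from the usage examples; the key names are exactly those read by process() above, but the file name and every value are illustrative placeholders, not settings from this repository.

    # survey-1-avoca.yaml -- all values below are illustrative placeholders
    BEACH: Avoca
    INPUT LAS: C:/data/survey-1/avoca.las
    INITIAL CROP POLY: C:/data/boundaries/avoca_crop.shp
    LASGROUND STEP: 5          # passed to lasground_new -step
    ZONE MGA: 56               # passed to polygon_wave_runup()
    CHECK VALUE: 2             # passed to polygon_wave_runup()
    DIRECTION: north           # passed to polygon_wave_runup()
    CHECK DISTANCE: 100        # passed to polygon_wave_runup()
    LAS CLASSIFIED FOLDER: C:/output/las_classified
    SHP SWASH FOLDER: C:/output/shp_swash
    TMP FOLDER: C:/output/tmp

Keeping one file per beach and survey date is what makes the *avoca.yaml and survey-1*.yaml glob patterns in the usage examples meaningful.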
diff --git a/las_outputs.py b/las_outputs.py
index 61dac57..21e888b 100644
--- a/las_outputs.py
+++ b/las_outputs.py
@@ -15,11 +15,13 @@ import io
 import re
 import sys
 import math
+import yaml
 import argparse
 import datetime
 import subprocess
 import numpy as np
 import pandas as pd
+from glob import glob
 from cycler import cycler
 import matplotlib.pyplot as plt
 from matplotlib.ticker import MultipleLocator
@@ -166,102 +168,119 @@ def calculate_volumes(profile_name, survey_date, csv_output_dir, ch_limits, volu
     return delta_vol
 
 
+def process(yaml_file):
+    with open(yaml_file, 'r') as f:
+        params = yaml.safe_load(f)
+
+    print("Starting to process %s" % params['BEACH'])
+    beach = params['BEACH']
+    survey_date = str(params['SURVEY DATE'])
+    original_las = params['INPUT LAS']
+    classified_las_dir = params['LAS CLASSIFIED FOLDER']
+    shp_swash_dir = params['SHP SWASH FOLDER']
+    crop_heatmap_poly = params['HEATMAP CROP POLY']
+    output_las_dir = params['LAS OUTPUT FOLDER']
+    zone_MGA = params['ZONE MGA']
+    output_poly_dir = params['SHP RASTER FOLDER']
+    output_tif_dir = params['TIF OUTPUT FOLDER']
+    cp_csv = params['INPUT CSV']
+    profile_limit_file = params['PROFILE LIMIT FILE']
+    csv_output_dir = params['CSV OUTPUT FOLDER']
+    graph_loc = params['PNG OUTPUT FOLDER']
+    volume_output_dir = params['CSV VOLUMES FOLDER']
+    tmp_dir = params['TMP FOLDER']
+
+    # Get base name of input las
+    las_basename = os.path.splitext(os.path.basename(original_las))[0]
+
+    # Get name of input point cloud
+    input_las = os.path.join(classified_las_dir, las_basename + '.las')
+
+    # Get name of swash cropping polygon
+    crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')
+
+    # Crop point cloud to swash boundary
+    print('Cropping swash...')
+    las_data = call_lastools('lasclip', input=input_las, output='-stdout',
+                             args=['-poly', crop_swash_poly], verbose=False)
+
+    # Apply sea-side clipping polygon
+    print('Cropping back of beach...')
+    las_data = call_lastools('lasclip', input=las_data, output='-stdout',
+                             args=['-poly', crop_heatmap_poly], verbose=False)
+
+    # Create clipping polygon for heatmap raster
+    print('Creating heat map cropping polygon...')
+    shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
+    call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)
+
+    # Make a raster from point cloud
+    print('Creating heat map raster...')
+    tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
+    call_lastools('las2dem', input=las_data, output=tif_name,
+                  args=['-step', 0.2, '-keep_class', 2], verbose=False)
+
+    # Extract elevations along profiles from triangulated surface
+    print('Extracting profile elevations...')
+    df = extract_pts(
+        las_data,
+        cp_csv,
+        survey_date,
+        beach,
+        args=['-parse', 'sxyz', '-keep_class', '2'],
+        verbose=False)
+
+    # Update survey profiles
+    update_survey_output(df, csv_output_dir)
+
+    # Get landward limit of surveys
+    ch_limits = pd.read_excel(profile_limit_file, index_col='Profile')
+
+    # Plot profiles, and save sand volumes for current beach
+    print('Updating figures...')
+    profile_names = df['Profile'].unique()
+    for profile_name in profile_names:
+        delta_vol = calculate_volumes(profile_name, survey_date,
+                                      csv_output_dir, ch_limits,
+                                      volume_output_dir)
+        plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits,
+                      delta_vol, survey_date)
+
+    # Remove temporary files
+    remove_temp_files(tmp_dir)
+
+
 def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        'input_file',
-        metavar='PARAMS_FILE',
-        help='name of parameter file',
-        default=None)
+    example_text = """examples:
+
+    # Process single survey at specific beach
+    python las_outputs.py survey-1-avoca.yaml
+
+    # Process all surveys at specific beach
+    python las_outputs.py *avoca.yaml
+
+    # Process all beaches for specific survey date
+    python las_outputs.py survey-1*.yaml
+    """
+
+    # Set up command line arguments
+    parser = argparse.ArgumentParser(
+        epilog=example_text,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument('input', help='path to yaml file(s)', nargs='*')
 
     # Print usage if no arguments are provided
     if len(sys.argv) == 1:
         parser.print_help(sys.stderr)
         sys.exit(1)
 
+    # Parse arguments
     args = parser.parse_args()
+    yaml_files = []
+    [yaml_files.extend(glob(f)) for f in args.input]
 
-    # read the parameters file and scroll through it
-    input_file = args.input_file
-    params_file=pd.read_excel(input_file, sheet_name="PARAMS")
-
-    for i, row in params_file.iterrows():
-        print("Starting to process %s" % row['BEACH'])
-        beach=row['BEACH']
-        survey_date = str(row['SURVEY DATE'])
-        original_las = row['INPUT LAS']
-        classified_las_dir = row['LAS CLASSIFIED FOLDER']
-        shp_swash_dir = row['SHP SWASH FOLDER']
-        crop_heatmap_poly = row['HEATMAP CROP POLY']
-        output_las_dir = row['LAS OUTPUT FOLDER']
-        zone_MGA = row['ZONE MGA']
-        output_poly_dir = row['SHP RASTER FOLDER']
-        output_tif_dir = row['TIF OUTPUT FOLDER']
-        cp_csv = row['INPUT CSV']
-        profile_limit_file = row['PROFILE LIMIT FILE']
-        csv_output_dir = row['CSV OUTPUT FOLDER']
-        graph_loc = row['PNG OUTPUT FOLDER']
-        volume_output_dir = row['CSV VOLUMES FOLDER']
-        tmp_dir = row['TMP FOLDER']
-
-        # Get base name of input las
-        las_basename = os.path.splitext(os.path.basename(original_las))[0]
-
-        # Get name of input point cloud
-        input_las = os.path.join(classified_las_dir, las_basename + '.las')
-
-        # Get name of swash cropping polygon
-        crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')
-
-        # Crop point cloud to swash boundary
-        print('Cropping swash...')
-        las_data = call_lastools('lasclip', input=input_las, output='-stdout',
-                                 args=['-poly', crop_swash_poly], verbose=False)
-
-        # Apply sea-side clipping polygon
-        print('Cropping back of beach...')
-        las_data = call_lastools('lasclip', input=las_data, output='-stdout',
-                                 args=['-poly', crop_heatmap_poly], verbose=False)
-
-        # Create clipping polygon for heatmap raster
-        print('Creating heat map cropping polygon...')
-        shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
-        call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)
-
-        # Make a raster from point cloud
-        print('Creating heat map raster...')
-        tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
-        call_lastools('las2dem', input=las_data, output=tif_name,
-                      args=['-step', 0.2, '-keep_class', 2], verbose=False)
-
-        # Extract elevations along profiles from triangulated surface
-        print('Extracting profile elevations...')
-        df = extract_pts(
-            las_data,
-            cp_csv,
-            survey_date,
-            beach,
-            args=['-parse', 'sxyz', '-keep_class', '2'],
-            verbose=False)
-
-        # Update survey profiles
-        update_survey_output(df, csv_output_dir)
-
-        # Get landward limit of surveys
-        ch_limits = pd.read_excel(profile_limit_file, index_col='Profile')
-
-        # Plot profiles, and save sand volumes for current beach
-        print('Updating figures...')
-        profile_names = df['Profile'].unique()
-        for profile_name in profile_names:
-            delta_vol = calculate_volumes(profile_name, survey_date,
-                                          csv_output_dir, ch_limits,
-                                          volume_output_dir)
-            plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits,
-                          delta_vol, survey_date)
-
-        # Remove temprary files
-        remove_temp_files(tmp_dir)
+    for yaml_file in yaml_files:
+        process(yaml_file)
 
 if __name__ == '__main__':
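las_outputs.py reads the same style of YAML file. On top of BEACH, INPUT LAS, ZONE MGA, LAS CLASSIFIED FOLDER, SHP SWASH FOLDER and TMP FOLDER shown in the earlier sketch, its process() function expects the keys below; a single file holding the union of both key sets works for both scripts, since keys a script does not read are simply ignored. As before, the key names come from the patch and every value is an illustrative placeholder.

    # additional keys read by las_outputs.py -- illustrative placeholders only
    SURVEY DATE: 20180724
    HEATMAP CROP POLY: C:/data/boundaries/avoca_heatmap.shp
    LAS OUTPUT FOLDER: C:/output/las
    SHP RASTER FOLDER: C:/output/shp_raster
    TIF OUTPUT FOLDER: C:/output/tif
    INPUT CSV: C:/data/profiles/avoca_points.csv        # becomes cp_csv, passed to extract_pts()
    PROFILE LIMIT FILE: C:/data/profiles/limits.xlsx    # read with pd.read_excel, indexed by 'Profile'
    CSV OUTPUT FOLDER: C:/output/csv
    PNG OUTPUT FOLDER: C:/output/png
    CSV VOLUMES FOLDER: C:/output/csv_volumes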