From e9b95d3a85876e411200454eacf254e2293e689e Mon Sep 17 00:00:00 2001 From: chrisd Date: Wed, 19 Feb 2020 14:34:29 +1100 Subject: [PATCH] Update 'las_outputs.py' Move legend in profile plots to RHS --- las_outputs.py | 682 ++++++++++++++++++++++++------------------------- 1 file changed, 341 insertions(+), 341 deletions(-) diff --git a/las_outputs.py b/las_outputs.py index 2819933..cb7911f 100644 --- a/las_outputs.py +++ b/las_outputs.py @@ -1,341 +1,341 @@ -"""las_outputs.py -Crop swash zone, plot survey profiles, and complete a volume analysis based -on the output from `las_manipulation.py`. - -Example usage: - -# Process single survey at specific beach -python las_outputs.py survey-1-avoca.yaml - -# Process single survey at multiple beaches -python las_outputs.py survey-1-avoca.yaml survey-1-pearl.yaml - -# Process all surveys at specific beach -python las_outputs.py *avoca.yaml - -# Process all beaches for specific survey date -python las_outputs.py survey-1*.yaml -""" - -import os -import io -import re -import sys -import math -import yaml -import argparse -import datetime -import subprocess -import numpy as np -import pandas as pd -from glob import glob -from cycler import cycler -import matplotlib.pyplot as plt -from matplotlib.ticker import MultipleLocator - -import nielsen_volumes -from survey_tools import call_lastools, extract_pts, update_survey_output - - -def get_datestring(x): - """Format a date integer into an ISO-8601 date string - - Args: - x: unformatted date - - Returns: - formatted date string - - Examples: - - >> get_datestring(19700101) - '1970-01-01' - """ - datestr = pd.datetime.strptime(str(x), '%Y%m%d').strftime('%Y-%m-%d') - - return datestr - - -def remove_temp_files(directory): - for f in os.listdir(directory): - os.unlink(os.path.join(directory, f)) - - return None - - -def plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits, delta_vol, survey_date, scale_figures=False): - csv_name = profile_name + '.csv' - profiles = 
pd.read_csv(os.path.join(csv_output_dir, csv_name)) - - # Remove metadata, and extract profile coordinates - profiles = profiles.loc[:, 'Chainage':].set_index('Chainage') - - # Determine if current survey is the latest - current_survey_col = 'Elevation_' + survey_date - is_latest = profiles.columns[-1] == current_survey_col - - # Only plot profiles up to current survey date - profiles = profiles.loc[:, :current_survey_col] - - # Find landward limit of profile (behind beach) - ch_min = ch_limits.loc[profile_name, 'Landward Limit'] - - # Set figure dimensions based on beach size - vertical_exag = 5 - m_per_inch = 8 - try: - fig_h = profiles.dropna().values.max() / m_per_inch * vertical_exag - fig_w = (profiles.index.max() - ch_min) / m_per_inch - except ValueError: - fig_h = 2.3 - fig_w = 10 - - if scale_figures: - fig, ax = plt.subplots(figsize=(fig_w, fig_h)) - ax.set_aspect(vertical_exag) - else: - fig, ax = plt.subplots(figsize=(10, 2.3)) - - - for col in profiles.columns: - profile = profiles.loc[:, col] - date_str = col.split('_')[-1] - date = get_datestring(date_str) - - ax.plot(profile.index, profile, label=date) - - # Show landward limit of volume calculations - ax.axvline(x=ch_min, color='#222222', linestyle='--', label='Landward limit') - - ax.set_title(profile_name) - ax.set_xlabel('Chainage (m)', labelpad=10) - ax.set_ylabel('Elevation (m AHD)', labelpad=10) - - Ylim=ax.get_ylim()[1] - - if Ylim<10: - ax.set_ylim([ax.get_ylim()[0], 10]) - - # Remove empty space at left of figure - try: - ax.set_xlim(left=profile.first_valid_index() - 10) - except TypeError: - pass - - # Show most recent volume change - if delta_vol is not None: - ax.annotate('Most recent\nvolume change:\n{:+0.1f} m$^3$/m'.format(delta_vol), - (0.05, 0.15), xycoords='axes fraction', fontsize=9, - backgroundcolor='#ffffff', linespacing=1.5) - - ax.legend(edgecolor='none', facecolor='#ffffff', fontsize=9, labelspacing=1) - - ax.xaxis.set_minor_locator(MultipleLocator(5)) - 
ax.yaxis.set_minor_locator(MultipleLocator(0.5)) - ax.xaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3) - ax.yaxis.grid(True,which='minor',color='k', linewidth=0.5, alpha=0.3) - - # Save in folder with current date - png_dirs = [os.path.join(graph_loc, get_datestring(survey_date))] - if is_latest: - # Save copy in'latest' if survey is most recent - png_dirs += [os.path.join(graph_loc, 'latest')] - - for png_dir in png_dirs: - # Prepare output directory - try: - os.makedirs(png_dir) - except FileExistsError: - pass - - png_name = os.path.join(png_dir, profile_name + '.png') - plt.savefig(png_name, bbox_inches='tight', dpi=300) - plt.close() - - -def calculate_volumes(profile_name, current_survey_date, previous_survey_date, - csv_output_dir, ch_limits, volume_output_dir): - csv_prof = profile_name + '.csv' - beach = re.search('.*(?=_\d)', profile_name).group() - profiles = pd.read_csv(os.path.join(csv_output_dir, csv_prof)) - - # Remove metadata, and extract profile coordinates - profiles = profiles.loc[:, 'Chainage':].set_index('Chainage') - - # Find landward limit of profile (behind beach) - ch_min = ch_limits.loc[profile_name, 'Landward Limit'] - - # Open volume spreadsheet - csv_vol = os.path.join(volume_output_dir, 'volumes.csv') - try: - volumes = pd.read_csv(csv_vol, index_col=0) - except FileNotFoundError: - # Create new dataframe if csv does not exist - volumes = pd.DataFrame() - - # Get Nielsen erosion volumes for current survey date - current_survey = 'Elevation_' + current_survey_date - chainage = profiles.loc[:, current_survey].dropna().index - elevation = profiles.loc[:, current_survey].dropna().values - try: - volume = nielsen_volumes.volume_available(chainage, elevation, ch_min) - except ValueError: - volume = np.nan - - # Update spreadsheet - volumes.loc[profile_name, 'Volume_' + current_survey_date] = volume - - # Save updated volumes spreadsheet - volumes = volumes[volumes.columns.sort_values()] - volumes = volumes.sort_index() - 
volumes.to_csv(csv_vol) - - # Get most recent volume difference for current profile - current_date_idx = volumes.columns.get_loc('Volume_' + current_survey_date) - previous_date_idx = volumes.columns.get_loc('Volume_' + previous_survey_date) - - if previous_date_idx < 0: - # Return None if there is only one survey - delta_vol = None - else: - previous_vol = volumes.loc[profile_name][previous_date_idx] - current_vol = volumes.loc[profile_name][current_date_idx] - delta_vol = current_vol - previous_vol - - return delta_vol - - -def process(yaml_file): - with open(yaml_file, 'r') as f: - params = yaml.safe_load(f) - - print("Starting to process %s" % params['BEACH']) - beach = params['BEACH'] - survey_date = str(params['SURVEY DATE']) - survey_date_previous = str(params['PREVIOUS SURVEY DATE']) - original_las = params['INPUT LAS'] - classified_las_dir = params['LAS CLASSIFIED FOLDER'] - shp_swash_dir = params['SHP SWASH FOLDER'] - crop_heatmap_poly = params['HEATMAP CROP POLY'] - output_las_dir = params['LAS OUTPUT FOLDER'] - zone_MGA = params['ZONE MGA'] - output_poly_dir = params['SHP RASTER FOLDER'] - output_tif_dir = params['TIF DEM FOLDER'] - cp_csv = params['INPUT CSV'] - profile_limit_file = params['PROFILE LIMIT FILE'] - csv_output_dir = params['CSV OUTPUT FOLDER'] - graph_loc = params['PNG OUTPUT FOLDER'] - volume_output_dir = params['CSV VOLUMES FOLDER'] - tmp_dir = params['TMP FOLDER'] - - # Get base name of input las - #las_basename = os.path.splitext(os.path.basename(original_las))[0] - - las_basename='%s_%s' % (beach.lower().replace(" ","_"), survey_date) - - # Get name of input point cloud - input_las = os.path.join(classified_las_dir, las_basename + '.las') - - # Get name of swash cropping polygon - crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp') - - # Crop point cloud to swash boundary - print('Cropping swash...') - las_data = call_lastools('lasclip', input=input_las, output='-stdout', - args=['-poly', crop_swash_poly], 
verbose=False) - - # Export classified, clipped las for delivery to client - las_name = os.path.join(output_las_dir, las_basename + '.las') - with open (las_name, 'wb') as f: - f.write(las_data) - - # Apply sea-side clipping polygon - print('Cropping back of beach...') - las_data = call_lastools('lasclip', input=las_data, output='-stdout', - args=['-poly', crop_heatmap_poly], verbose=False) - - - # Create clipping polygon for heatmap raster - print('Creating heat map cropping polygon...') - shp_name = os.path.join(output_poly_dir, las_basename + '.shp') - call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False) - - # Make a raster from point cloud - print('Creating heat map raster...') - tif_name = os.path.join(output_tif_dir, las_basename + '_DEM.tif') - call_lastools('las2dem', input=las_data, output=tif_name, - args=['-step', 1, '-keep_class', 2], verbose=False) - # IF THIS STEP ISN'T WORKING: - # might mean there are no data lines - # trying running with args=['-step', 1, '-keep_class', 2, '-rescale', 0.001,0.001,0.001] - #call_lastools('las2dem', input=las_data, output=tif_name, - # args=['-step', 1, '-keep_class', 2, '-rescale', 0.001,0.001,0.001], verbose=False) - - # Extract elevations along profiles from triangulated surface - print('Extracting profile elevations...') - df = extract_pts( - las_data, - cp_csv, - survey_date, - beach, - args=['-parse', 'sxyz', '-keep_class', '2'], - verbose=False) - - # Update survey profiles - update_survey_output(df, csv_output_dir) - - # Get landward limit of surveys - ch_limits = pd.read_excel(profile_limit_file, index_col='Profile') - - # Plot profiles, and save sand volumes for current beach - print('Updating figures...') - profile_names = df['Profile'].unique() - for profile_name in profile_names: - delta_vol = calculate_volumes(profile_name, survey_date, survey_date_previous, - csv_output_dir, ch_limits, - volume_output_dir) - plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits, - 
delta_vol, survey_date) - - # Remove temprary files - remove_temp_files(tmp_dir) - - -def main(): - example_text = """examples: - - # Process single survey at specific beach - python las_outputs.py survey-1-avoca.yaml - - # Process single survey at multiple beaches - python las_outputs.py survey-1-avoca.yaml survey-1-pearl.yaml - - # Process all surveys at specific beach - python las_outputs.py *avoca.yaml - - # Process all beaches for specific survey date - python las_outputs.py survey-1*.yaml - """ - - # Set up command line arguments - parser = argparse.ArgumentParser( - epilog=example_text, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('input', help='path to yaml file(s)', nargs='*') - - # Print usage if no arguments are provided - if len(sys.argv) == 1: - parser.print_help(sys.stderr) - sys.exit(1) - - # Parse arguments - args = parser.parse_args() - yaml_files = [] - [yaml_files.extend(glob(f)) for f in args.input] - - for yaml_file in yaml_files: - process(yaml_file) - - -if __name__ == '__main__': - main() +"""las_outputs.py +Crop swash zone, plot survey profiles, and complete a volume analysis based +on the output from `las_manipulation.py`. 
+ +Example usage: + +# Process single survey at specific beach +python las_outputs.py survey-1-avoca.yaml + +# Process single survey at multiple beaches +python las_outputs.py survey-1-avoca.yaml survey-1-pearl.yaml + +# Process all surveys at specific beach +python las_outputs.py *avoca.yaml + +# Process all beaches for specific survey date +python las_outputs.py survey-1*.yaml +""" + +import os +import io +import re +import sys +import math +import yaml +import argparse +import datetime +import subprocess +import numpy as np +import pandas as pd +from glob import glob +from cycler import cycler +import matplotlib.pyplot as plt +from matplotlib.ticker import MultipleLocator + +import nielsen_volumes +from survey_tools import call_lastools, extract_pts, update_survey_output + + +def get_datestring(x): + """Format a date integer into an ISO-8601 date string + + Args: + x: unformatted date + + Returns: + formatted date string + + Examples: + + >> get_datestring(19700101) + '1970-01-01' + """ + datestr = pd.datetime.strptime(str(x), '%Y%m%d').strftime('%Y-%m-%d') + + return datestr + + +def remove_temp_files(directory): + for f in os.listdir(directory): + os.unlink(os.path.join(directory, f)) + + return None + + +def plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits, delta_vol, survey_date, scale_figures=False): + csv_name = profile_name + '.csv' + profiles = pd.read_csv(os.path.join(csv_output_dir, csv_name)) + + # Remove metadata, and extract profile coordinates + profiles = profiles.loc[:, 'Chainage':].set_index('Chainage') + + # Determine if current survey is the latest + current_survey_col = 'Elevation_' + survey_date + is_latest = profiles.columns[-1] == current_survey_col + + # Only plot profiles up to current survey date + profiles = profiles.loc[:, :current_survey_col] + + # Find landward limit of profile (behind beach) + ch_min = ch_limits.loc[profile_name, 'Landward Limit'] + + # Set figure dimensions based on beach size + vertical_exag = 5 + 
m_per_inch = 8
+    try:
+        fig_h = profiles.dropna().values.max() / m_per_inch * vertical_exag
+        fig_w = (profiles.index.max() - ch_min) / m_per_inch
+    except ValueError:
+        fig_h = 2.3
+        fig_w = 10
+
+    if scale_figures:
+        fig, ax = plt.subplots(figsize=(fig_w, fig_h))
+        ax.set_aspect(vertical_exag)
+    else:
+        fig, ax = plt.subplots(figsize=(10, 2.3))
+
+
+    for col in profiles.columns:
+        profile = profiles.loc[:, col]
+        date_str = col.split('_')[-1]
+        date = get_datestring(date_str)
+
+        ax.plot(profile.index, profile, label=date)
+
+    # Show landward limit of volume calculations
+    ax.axvline(x=ch_min, color='#222222', linestyle='--', label='Landward limit')
+
+    ax.set_title(profile_name)
+    ax.set_xlabel('Chainage (m)', labelpad=10)
+    ax.set_ylabel('Elevation (m AHD)', labelpad=10)
+
+    Ylim=ax.get_ylim()[1]
+
+    if Ylim<10:
+        ax.set_ylim([ax.get_ylim()[0], 10])
+
+    # Remove empty space at left of figure
+    try:
+        ax.set_xlim(left=profile.first_valid_index() - 10)
+    except TypeError:
+        pass
+
+    # Show most recent volume change
+    if delta_vol is not None:
+        ax.annotate('Most recent\nvolume change:\n{:+0.1f} m$^3$/m'.format(delta_vol),
+                    (0.05, 0.15), xycoords='axes fraction', fontsize=9,
+                    backgroundcolor='#ffffff', linespacing=1.5)
+
+    ax.legend(edgecolor='none', facecolor='#ffffff', fontsize=9, labelspacing=1,
+              loc='lower left', bbox_to_anchor=(1.02,0))
+    ax.xaxis.set_minor_locator(MultipleLocator(5))
+    ax.yaxis.set_minor_locator(MultipleLocator(0.5))
+    ax.xaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)
+    ax.yaxis.grid(True,which='minor',color='k', linewidth=0.5, alpha=0.3)
+
+    # Save in folder with current date
+    png_dirs = [os.path.join(graph_loc, get_datestring(survey_date))]
+    if is_latest:
+        # Save copy in 'latest' if survey is most recent
+        png_dirs += [os.path.join(graph_loc, 'latest')]
+
+    for png_dir in png_dirs:
+        # Prepare output directory
+        try:
+            os.makedirs(png_dir)
+        except FileExistsError:
+            pass
+
+        png_name = os.path.join(png_dir, 
profile_name + '.png') + plt.savefig(png_name, bbox_inches='tight', dpi=300) + plt.close() + + +def calculate_volumes(profile_name, current_survey_date, previous_survey_date, + csv_output_dir, ch_limits, volume_output_dir): + csv_prof = profile_name + '.csv' + beach = re.search('.*(?=_\d)', profile_name).group() + profiles = pd.read_csv(os.path.join(csv_output_dir, csv_prof)) + + # Remove metadata, and extract profile coordinates + profiles = profiles.loc[:, 'Chainage':].set_index('Chainage') + + # Find landward limit of profile (behind beach) + ch_min = ch_limits.loc[profile_name, 'Landward Limit'] + + # Open volume spreadsheet + csv_vol = os.path.join(volume_output_dir, 'volumes.csv') + try: + volumes = pd.read_csv(csv_vol, index_col=0) + except FileNotFoundError: + # Create new dataframe if csv does not exist + volumes = pd.DataFrame() + + # Get Nielsen erosion volumes for current survey date + current_survey = 'Elevation_' + current_survey_date + chainage = profiles.loc[:, current_survey].dropna().index + elevation = profiles.loc[:, current_survey].dropna().values + try: + volume = nielsen_volumes.volume_available(chainage, elevation, ch_min) + except ValueError: + volume = np.nan + + # Update spreadsheet + volumes.loc[profile_name, 'Volume_' + current_survey_date] = volume + + # Save updated volumes spreadsheet + volumes = volumes[volumes.columns.sort_values()] + volumes = volumes.sort_index() + volumes.to_csv(csv_vol) + + # Get most recent volume difference for current profile + current_date_idx = volumes.columns.get_loc('Volume_' + current_survey_date) + previous_date_idx = volumes.columns.get_loc('Volume_' + previous_survey_date) + + if previous_date_idx < 0: + # Return None if there is only one survey + delta_vol = None + else: + previous_vol = volumes.loc[profile_name][previous_date_idx] + current_vol = volumes.loc[profile_name][current_date_idx] + delta_vol = current_vol - previous_vol + + return delta_vol + + +def process(yaml_file): + with 
open(yaml_file, 'r') as f: + params = yaml.safe_load(f) + + print("Starting to process %s" % params['BEACH']) + beach = params['BEACH'] + survey_date = str(params['SURVEY DATE']) + survey_date_previous = str(params['PREVIOUS SURVEY DATE']) + original_las = params['INPUT LAS'] + classified_las_dir = params['LAS CLASSIFIED FOLDER'] + shp_swash_dir = params['SHP SWASH FOLDER'] + crop_heatmap_poly = params['HEATMAP CROP POLY'] + output_las_dir = params['LAS OUTPUT FOLDER'] + zone_MGA = params['ZONE MGA'] + output_poly_dir = params['SHP RASTER FOLDER'] + output_tif_dir = params['TIF DEM FOLDER'] + cp_csv = params['INPUT CSV'] + profile_limit_file = params['PROFILE LIMIT FILE'] + csv_output_dir = params['CSV OUTPUT FOLDER'] + graph_loc = params['PNG OUTPUT FOLDER'] + volume_output_dir = params['CSV VOLUMES FOLDER'] + tmp_dir = params['TMP FOLDER'] + + # Get base name of input las + #las_basename = os.path.splitext(os.path.basename(original_las))[0] + + las_basename='%s_%s' % (beach.lower().replace(" ","_"), survey_date) + + # Get name of input point cloud + input_las = os.path.join(classified_las_dir, las_basename + '.las') + + # Get name of swash cropping polygon + crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp') + + # Crop point cloud to swash boundary + print('Cropping swash...') + las_data = call_lastools('lasclip', input=input_las, output='-stdout', + args=['-poly', crop_swash_poly], verbose=False) + + # Export classified, clipped las for delivery to client + las_name = os.path.join(output_las_dir, las_basename + '.las') + with open (las_name, 'wb') as f: + f.write(las_data) + + # Apply sea-side clipping polygon + print('Cropping back of beach...') + las_data = call_lastools('lasclip', input=las_data, output='-stdout', + args=['-poly', crop_heatmap_poly], verbose=False) + + + # Create clipping polygon for heatmap raster + print('Creating heat map cropping polygon...') + shp_name = os.path.join(output_poly_dir, las_basename + '.shp') + 
call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False) + + # Make a raster from point cloud + print('Creating heat map raster...') + tif_name = os.path.join(output_tif_dir, las_basename + '_DEM.tif') + call_lastools('las2dem', input=las_data, output=tif_name, + args=['-step', 1, '-keep_class', 2], verbose=False) + # IF THIS STEP ISN'T WORKING: + # might mean there are no data lines + # trying running with args=['-step', 1, '-keep_class', 2, '-rescale', 0.001,0.001,0.001] + #call_lastools('las2dem', input=las_data, output=tif_name, + # args=['-step', 1, '-keep_class', 2, '-rescale', 0.001,0.001,0.001], verbose=False) + + # Extract elevations along profiles from triangulated surface + print('Extracting profile elevations...') + df = extract_pts( + las_data, + cp_csv, + survey_date, + beach, + args=['-parse', 'sxyz', '-keep_class', '2'], + verbose=False) + + # Update survey profiles + update_survey_output(df, csv_output_dir) + + # Get landward limit of surveys + ch_limits = pd.read_excel(profile_limit_file, index_col='Profile') + + # Plot profiles, and save sand volumes for current beach + print('Updating figures...') + profile_names = df['Profile'].unique() + for profile_name in profile_names: + delta_vol = calculate_volumes(profile_name, survey_date, survey_date_previous, + csv_output_dir, ch_limits, + volume_output_dir) + plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits, + delta_vol, survey_date) + + # Remove temprary files + remove_temp_files(tmp_dir) + + +def main(): + example_text = """examples: + + # Process single survey at specific beach + python las_outputs.py survey-1-avoca.yaml + + # Process single survey at multiple beaches + python las_outputs.py survey-1-avoca.yaml survey-1-pearl.yaml + + # Process all surveys at specific beach + python las_outputs.py *avoca.yaml + + # Process all beaches for specific survey date + python las_outputs.py survey-1*.yaml + """ + + # Set up command line arguments + parser = 
argparse.ArgumentParser( + epilog=example_text, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('input', help='path to yaml file(s)', nargs='*') + + # Print usage if no arguments are provided + if len(sys.argv) == 1: + parser.print_help(sys.stderr) + sys.exit(1) + + # Parse arguments + args = parser.parse_args() + yaml_files = [] + [yaml_files.extend(glob(f)) for f in args.input] + + for yaml_file in yaml_files: + process(yaml_file) + + +if __name__ == '__main__': + main()