Compare commits


2 Commits

Author          SHA1        Message                                          Date
Chris Drummond  7ad2760377  Does not crop the classified las file            5 years ago
Chris Drummond  b00902c204  Changed gridding from 1 m to 0.1 m in blast2dem  5 years ago

@@ -63,7 +63,7 @@ python las_outputs.py param-files/survey-1*.yaml param-files/survey-2*.yaml
 Example usage:
 ```
-C:\Python27\ArcGIS10.5\python generate_heatmaps.py param-files\survey-2*.yaml
+C:/Python27/ArcGIS10.4/python generate_heatmaps.py param-files/survey-2*.yaml
 ```
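The hunk above just repoints the README at a different ArcGIS-bundled interpreter (10.5 with backslashes becomes 10.4 with forward slashes). One detail worth keeping in mind when rerunning it: cmd.exe does not expand the `survey-2*.yaml` wildcard, so a script invoked this way has to glob its own arguments. A minimal sketch of that pattern, assuming PyYAML and the standard-library glob module (the actual generate_heatmaps.py is not shown in this compare):

```
# Minimal sketch (not the repository's generate_heatmaps.py): expand the
# param-file wildcard ourselves, since the Windows shell passes it through
# literally, then load each YAML job description.
import glob
import sys

import yaml  # PyYAML, assumed to be installed alongside the ArcGIS Python

for pattern in sys.argv[1:]:
    for yaml_name in sorted(glob.glob(pattern)):
        with open(yaml_name) as f:
            params = yaml.safe_load(f)
        print('loaded {} ({} top-level keys)'.format(yaml_name, len(params)))
```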
@@ -76,29 +76,5 @@ C:\Python27\ArcGIS10.5\python generate_heatmaps.py param-files\survey-2*.yaml
 Example usage:
 ```
-C:\Python27\ArcGIS10.5\python plot_heatmaps.py param-files\survey-2-avoca.yaml
+C:/Python27/ArcGIS10.4/python plot_heatmaps.py param-files/survey-2-avoca.yaml
-```
-### 7. Export 0.7 m contour for DSAS trend analysis
-`extract_contours.py` extracts a predefined contour (say 0.7 m AHD) and outputs it in a format suitable for DSAS shoreline trend analysis.
-Example usage:
-```
-python extract_contours.py
-```
-## polyline_to_points.py
-This script interpolates points along transects in a shapefile.
-Example usage:
-```
-# Extract points at default spacing (1m)
-$ python polyline_to_points.py path/to/shp
-# Extract points at 5m increments
-$ python polyline_to_points.py -s 5 path/to/shp
-# Use profile names from field "ProfileID" in the attribute table at 1 m spacing
-$ python polyline_to_points.py -f ProfileID -s 1 path/to/shp
 ```
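The block removed above documented `extract_contours.py` (export a fixed contour, e.g. 0.7 m AHD, for DSAS shoreline trend analysis) together with the now-deleted `polyline_to_points.py`. For anyone who still needs the contour step, here is a rough sketch of the idea; the file names, the rasterio/scikit-image stack and the CSV output are illustrative assumptions, not the repository's implementation (which targets DSAS-ready output):

```
# Rough sketch (not the repository's extract_contours.py): trace the 0.7 m AHD
# contour of a DEM and dump its vertices to CSV. DEM_PATH and the CSV layout
# are assumptions for illustration only.
import rasterio
from skimage import measure

DEM_PATH = 'survey-2-avoca_DEM.tif'   # hypothetical input raster
LEVEL = 0.7                           # contour elevation (m AHD)

with rasterio.open(DEM_PATH) as src:
    dem = src.read(1, masked=True)
    transform = src.transform

# find_contours works in (row, col) pixel space; fill nodata well below LEVEL
contours = measure.find_contours(dem.filled(-9999.0), LEVEL)

with open('contour_0.7m_AHD.csv', 'w') as f:
    f.write('line_id,easting,northing\n')
    for i, contour in enumerate(contours):
        for row, col in contour:
            easting, northing = transform * (col, row)
            f.write('{},{:.3f},{:.3f}\n'.format(i, easting, northing))
```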

@@ -72,10 +72,10 @@ def process(yaml_name):
         raise ValueError('No previous survey date provided')
     # Set paths for raster files
-    current_raster = os.path.join(input_tif_dir, base_name + '_DEM.tif')
+    current_raster = os.path.join(input_tif_dir, base_name + '.tif')
     previous_base_name = re.sub('\d+', previous_date, base_name)
-    previous_raster = os.path.join(input_tif_dir, previous_base_name + '_DEM.tif')
+    previous_raster = os.path.join(input_tif_dir, previous_base_name + '.tif')
-    heatmap_raster = os.path.join(output_tif_dir, base_name + '_heatmap.tif')
+    heatmap_raster = os.path.join(output_tif_dir, base_name + '.tif')
     print('processing {}'.format(beach))
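This hunk drops the `_DEM` and `_heatmap` suffixes so that the current, previous and output rasters are all looked up as plain `<base_name>.tif`, leaving the date in the name and the directory to tell them apart. The previous survey's file name is still derived by swapping the date digits in the current base name; a quick illustration with a hypothetical naming pattern:

```
# Illustration only: how re.sub turns the current survey's base name into the
# previous survey's. The '<date>_<beach>' pattern is an assumption.
import re

base_name = '20200115_avoca'
previous_date = '20191020'

previous_base_name = re.sub(r'\d+', previous_date, base_name)
print(previous_base_name)   # -> '20191020_avoca'
# With the change above, both rasters are now expected as '<base_name>.tif'
# rather than '<base_name>_DEM.tif'.
```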

@@ -407,7 +407,7 @@ def process(yaml_file):
     print('Interpolating to grid...')
     xyz_name = os.path.join(tmp_dir, las_basename + '.xyz')
     call_lastools('blast2dem', input=las_classified_name, output=xyz_name,
-                  args=['-step', 1], verbose=False)
+                  args=['-step', 0.1], verbose=False)
     # Make runup clipping mask from gridded point cloud
     print('Calculating runup clipping mask...')
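This is the gridding change from the second commit: blast2dem now interpolates the classified point cloud onto a 0.1 m grid instead of 1 m before the runup clipping mask is built. `call_lastools()` is a project helper that is not part of this compare, so as a sketch only, the equivalent standalone BLAST call would look roughly like this (paths are placeholders):

```
# Sketch of the standalone LAStools call the wrapper presumably assembles;
# this is not the repository's call_lastools() implementation.
import subprocess

def blast2dem_to_xyz(las_in, xyz_out, step=0.1):
    """Grid a classified LAS file to an XYZ surface with blast2dem."""
    cmd = ['blast2dem', '-i', las_in, '-o', xyz_out, '-step', str(step)]
    subprocess.check_call(cmd)

# e.g. blast2dem_to_xyz('tmp/survey.las', 'tmp/survey.xyz', step=0.1)
```

Note that a 0.1 m step is ten times finer in each direction, so the gridded output has roughly 100 times as many cells as before.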

@@ -129,7 +129,7 @@ def plot_profiles(profile_name, csv_output_dir, graph_loc, ch_limits, delta_vol,
             backgroundcolor='#ffffff', linespacing=1.5)
-    ax.legend(edgecolor='none', facecolor='#ffffff', fontsize=9, labelspacing=1)
+    ax.legend(edgecolor='none', facecolor='#ffffff', fontsize=9, labelspacing=1,
+              loc='lower left', bbox_to_anchor=(1.02,0))
     ax.xaxis.set_minor_locator(MultipleLocator(5))
     ax.yaxis.set_minor_locator(MultipleLocator(0.5))
     ax.xaxis.grid(True, which='minor', color='k', linewidth=0.5, alpha=0.3)
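For reference, when `bbox_to_anchor` is supplied, `loc` names the corner of the legend that gets pinned to that point, so the `loc='lower left'`, `bbox_to_anchor=(1.02,0)` pair appearing in this hunk places the legend just outside the right-hand edge of the axes, level with the bottom. A self-contained demo (made-up series labels):

```
# Standalone demo of the loc/bbox_to_anchor combination in this hunk.
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1], label='2020-01-15')   # demo data only
ax.plot([0, 1], [0, 0.5], label='2019-10-20')
ax.legend(edgecolor='none', facecolor='#ffffff', fontsize=9, labelspacing=1,
          loc='lower left', bbox_to_anchor=(1.02, 0))
fig.savefig('legend_demo.png', bbox_inches='tight')  # keep the outside legend in frame
```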
@@ -190,14 +190,15 @@ def calculate_volumes(profile_name, current_survey_date, previous_survey_date,
     volumes = volumes.sort_index()
     volumes.to_csv(csv_vol)
-    # Get most recent volume difference for current profile
-    current_date_idx = volumes.columns.get_loc('Volume_' + current_survey_date)
-    previous_date_idx = volumes.columns.get_loc('Volume_' + previous_survey_date)
-    if previous_date_idx < 0:
+    if previous_survey_date=="nan":
         # Return None if there is only one survey
         delta_vol = None
     else:
+        # Get most recent volume difference for current profile
+        current_date_idx = volumes.columns.get_loc('Volume_' + current_survey_date)
+        previous_date_idx = volumes.columns.get_loc('Volume_' + previous_survey_date)
         previous_vol = volumes.loc[profile_name][previous_date_idx]
         current_vol = volumes.loc[profile_name][current_date_idx]
         delta_vol = current_vol - previous_vol
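The reordering above makes the "only one survey" case explicit: the new code tests `previous_survey_date == "nan"` before touching the volume table, whereas the old guard `previous_date_idx < 0` could never fire, because `Index.get_loc` raises `KeyError` for a missing column rather than returning a negative index. A toy, self-contained rerun of the new logic (numbers are made up):

```
# Toy illustration of the volume-difference lookup; values are invented.
import pandas as pd

volumes = pd.DataFrame(
    {'Volume_20191020': [152.3], 'Volume_20200115': [148.9]},
    index=['avoca_01'])

profile_name = 'avoca_01'
current_survey_date, previous_survey_date = '20200115', '20191020'

if previous_survey_date == "nan":
    delta_vol = None   # only one survey available
else:
    current_date_idx = volumes.columns.get_loc('Volume_' + current_survey_date)
    previous_date_idx = volumes.columns.get_loc('Volume_' + previous_survey_date)
    previous_vol = volumes.loc[profile_name][previous_date_idx]
    current_vol = volumes.loc[profile_name][current_date_idx]
    delta_vol = current_vol - previous_vol   # about -3.4, i.e. erosion
```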
@@ -239,32 +240,11 @@ def process(yaml_file):
     # Get name of swash cropping polygon
     crop_swash_poly = os.path.join(shp_swash_dir, las_basename + '.shp')
-    # Crop point cloud to swash boundary
-    print('Cropping swash...')
-    las_data = call_lastools('lasclip', input=input_las, output='-stdout',
-                             args=['-poly', crop_swash_poly], verbose=False)
-    # Export classified, clipped las for delivery to client
-    las_name = os.path.join(output_las_dir, las_basename + '.las')
-    with open (las_name, 'wb') as f:
-        f.write(las_data)
-    # Apply sea-side clipping polygon
-    print('Cropping back of beach...')
-    las_data = call_lastools('lasclip', input=las_data, output='-stdout',
-                             args=['-poly', crop_heatmap_poly], verbose=False)
-    # Create clipping polygon for heatmap raster
-    print('Creating heat map cropping polygon...')
-    shp_name = os.path.join(output_poly_dir, las_basename + '.shp')
-    call_lastools('lasboundary', input=las_data, output=shp_name, verbose=False)
     # Make a raster from point cloud
     print('Creating heat map raster...')
-    tif_name = os.path.join(output_tif_dir, las_basename + '_DEM.tif')
-    call_lastools('las2dem', input=las_data, output=tif_name,
-                  args=['-step', 1, '-keep_class', 2], verbose=False)
+    tif_name = os.path.join(output_tif_dir, las_basename + '.tif')
+    call_lastools('blast2dem', input=input_las, output=tif_name,
+                  args=['-step', 0.1, '-keep_class', 2], verbose=True)
     # IF THIS STEP ISN'T WORKING:
     # might mean there are no data lines
     # trying running with args=['-step', 1, '-keep_class', 2, '-rescale', 0.001,0.001,0.001]
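This hunk is the heart of the "Does not crop the classified las file" commit: the lasclip/lasboundary chain that clipped the point cloud to the swash and heatmap polygons (and wrote out a clipped .las for the client) is removed, and the heatmap raster is now gridded directly from the input point cloud with blast2dem at 0.1 m, with verbose output switched on. As a rough sketch only, here are the old and new behaviours expressed as plain LAStools invocations, bypassing the repo's call_lastools() wrapper (all file names are placeholders):

```
# Sketch of old vs new processing as bare LAStools commands; placeholders only.
import subprocess

def run(cmd):
    print(' '.join(cmd))
    subprocess.check_call(cmd)

# Old behaviour: clip to the swash polygon, keep a copy for delivery,
# clip again to the heatmap polygon, then trace its boundary.
run(['lasclip', '-i', 'survey.las', '-poly', 'swash.shp', '-o', 'clipped.las'])
run(['lasclip', '-i', 'clipped.las', '-poly', 'heatmap.shp', '-o', 'beach.las'])
run(['lasboundary', '-i', 'beach.las', '-o', 'beach_boundary.shp'])

# New behaviour: grid the uncropped, classified point cloud in one step.
run(['blast2dem', '-i', 'survey.las', '-o', 'survey.tif',
     '-step', '0.1', '-keep_class', '2'])
```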
@@ -274,7 +254,7 @@ def process(yaml_file):
     # Extract elevations along profiles from triangulated surface
     print('Extracting profile elevations...')
     df = extract_pts(
-        las_data,
+        input_las,
         cp_csv,
         survey_date,
         beach,

Binary file not shown.

@@ -70,7 +70,7 @@ def process(yaml_name):
         raise ValueError('No previous survey date provided')
     # Set paths for raster files
-    heatmap_raster = os.path.join(input_tif_dir, base_name + '_heatmap.tif')
+    heatmap_raster = os.path.join(input_tif_dir, base_name + '.tif')
     print('processing {}'.format(beach))
     mxd = arcpy.mapping.MapDocument(mxd_name)
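With the `_heatmap` suffix dropped here too, plot_heatmaps.py now looks for the same `<base_name>.tif` the gridding step writes. The script drives ArcGIS through arcpy.mapping (ArcGIS 10.x, Python 2.7), but only the MapDocument line is visible in this hunk, so everything else in the following sketch (layer name, workspace paths, the PNG export) is an assumption about the usual arcpy pattern rather than this repository's code:

```
# Hypothetical arcpy.mapping sketch: open an .mxd template, repoint a raster
# layer at the new heatmap GeoTIFF, export the layout. Paths and the layer
# name 'heatmap' are assumptions.
import arcpy

mxd = arcpy.mapping.MapDocument(r'C:\maps\heatmap_template.mxd')
for lyr in arcpy.mapping.ListLayers(mxd):
    if lyr.name == 'heatmap':
        lyr.replaceDataSource(r'C:\tif', 'RASTER_WORKSPACE', 'survey.tif')
arcpy.mapping.ExportToPNG(mxd, r'C:\png\survey_heatmap.png', resolution=300)
del mxd
```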

@@ -1,92 +0,0 @@
-"""polyline_to_points.py
-Extract interpolated points along transects in a shapefile.
-
-D. Howe
-d.howe@wrl.unsw.edu.au
-2020-02-19
-"""
-import sys
-import argparse
-import numpy as np
-import pandas as pd
-import geopandas as gpd
-from shapely.geometry import LineString
-
-
-def extract(shp_path, spacing=1, field=None):
-    rows = []
-    shp = gpd.read_file(shp_path)
-
-    if field is None:
-        # Assume profile name is second field in shapefile
-        field = shp.columns[1]
-
-    for i, line in shp.iterrows():
-        g = line.geometry
-        chainages = np.arange(0, g.length, step=spacing)
-        for chainage in chainages:
-            easting, northing = g.interpolate(chainage).xy
-            row = {
-                'ProfileNum': line[field],
-                'Easting': easting[0],
-                'Northing': northing[0],
-                'Chainage': chainage,
-            }
-            rows.append(row)
-
-    # Create output dataframe
-    df = pd.DataFrame(rows)
-
-    # Re-order columns
-    df = df[['ProfileNum', 'Easting', 'Northing', 'Chainage']]
-
-    # Export to csv
-    csv_path = shp_path.replace('.shp', '.csv')
-    df.to_csv(csv_path, index=False)
-
-
-def main():
-    example_text = """examples:
-
-    # Extract points at default spacing (1m)
-    $ python polyline_to_points.py path/to/shp
-
-    # Extract points at 5m increments
-    $ python polyline_to_points.py -s 5 path/to/shp
-
-    # Use profile names from field "ProfileID" in the attribute table
-    $ python polyline_to_points.py -f ProfileID path/to/shp
-    """
-
-    # Set up command line arguments
-    parser = argparse.ArgumentParser(
-        epilog=example_text,
-        formatter_class=argparse.RawDescriptionHelpFormatter)
-    parser.add_argument('shp_path',
-                        metavar='SHAPEFILE',
-                        help='path to input shapefile')
-    parser.add_argument('-s',
-                        '--spacing',
-                        metavar='SPACING',
-                        default=1,
-                        type=int,
-                        help='space between points (default=1)')
-    parser.add_argument('-f',
-                        '--field',
-                        metavar='FIELDNAME',
-                        help='profile field name in attribute table')
-
-    # Print usage if no arguments are provided
-    if len(sys.argv) == 1:
-        parser.print_help(sys.stderr)
-        sys.exit(1)
-
-    # Parse arguments
-    args = parser.parse_args()
-
-    # Combine images
-    extract(**vars(args))
-
-
-if __name__ == '__main__':
-    main()
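The whole of polyline_to_points.py goes away in this compare (its README section was removed further up as well). If the transect-point interpolation is ever needed again, the core of it is a single shapely call; a standalone illustration with made-up coordinates:

```
# Standalone illustration of the interpolation at the heart of the deleted
# script: walk fixed chainages along a LineString and read off each point.
import numpy as np
from shapely.geometry import LineString

transect = LineString([(340000.0, 6260000.0), (340100.0, 6260000.0)])  # made-up
for chainage in np.arange(0, transect.length, step=5):
    pt = transect.interpolate(chainage)
    print('{:>5.1f}  {:.1f}  {:.1f}'.format(chainage, pt.x, pt.y))
```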