updated most functions and workflow

development
kvos 7 years ago
parent 7ca64f2fa0
commit 0e78aace0c

.gitignore vendored (3 additions)

@ -1,3 +1,6 @@
*.pyc
*.mat
*.tif
*.png
*.mp4
*.gif

Binary files not shown (4 binary files changed).

@ -1,9 +1,8 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 27 17:12:35 2018
@author: Kilian
"""
#==========================================================#
# Download L7 images of a given area between given dates
#==========================================================#
# Initial settings
import os
@ -12,7 +11,6 @@ import matplotlib.pyplot as plt
import pdb
import ee
# other modules
from osgeo import gdal, ogr, osr
from urllib.request import urlretrieve
@ -21,8 +19,6 @@ from datetime import datetime
import pytz
import pickle
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
@ -30,7 +26,6 @@ import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
# import own modules
import functions.utils as utils

@ -1,9 +1,8 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 27 17:12:35 2018
@author: Kilian
"""
#==========================================================#
# Download L8 images of a given area between given dates
#==========================================================#
# Initial settings
import os
@ -12,7 +11,6 @@ import matplotlib.pyplot as plt
import pdb
import ee
# other modules
from osgeo import gdal, ogr, osr
from urllib.request import urlretrieve
@ -21,8 +19,6 @@ from datetime import datetime
import pytz
import pickle
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
@ -30,7 +26,6 @@ import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
# import own modules
import functions.utils as utils
@ -53,24 +48,37 @@ def download_tif(image, polygon, bandsId, filepath):
# select collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.301454, -33.700754],
[151.311453, -33.702075],
[151.307237, -33.739761],
[151.294220, -33.736329],
[151.301454, -33.700754]]];
#polygon = [[[151.301454, -33.700754],
# [151.311453, -33.702075],
# [151.307237, -33.739761],
# [151.294220, -33.736329],
# [151.301454, -33.700754]]];
# location (Oldbar beach)
polygon = [[[152.664508, -31.896163],
[152.665827, -31.897112],
[152.631516, -31.924846],
[152.629285, -31.923362],
[152.664508, -31.896163]]]
# location (Oldbar inlet)
#polygon = [[[152.676283, -31.866784],
# [152.709174, -31.869993],
# [152.678229, -31.892082],
# [152.670366, -31.886360],
# [152.676283, -31.866784]]];
# dates
#start_date = '2016-01-01'
#end_date = '2016-12-31'
start_date = '2013-01-01'
end_date = '2018-12-31'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))#.filterDate(start_date, end_date)
flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon))#.filterDate(start_date, end_date)
n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
print('Number of images covering the area:', n_img)
im_all = flt_col.getInfo().get('features')
satname = 'L8'
sitename = 'NARRA'
#sitename = 'NARRA'
sitename = 'OLDBAR'
suffix = '.tif'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
filepath_pan = os.path.join(filepath, 'pan')
@ -105,9 +113,9 @@ for i in range(n_img):
filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + '_r' + suffix
all_names_pan.append(filename_pan)
local_data_pan = download_tif(im, rect_narra, pan_band, filepath_pan)
local_data_pan = download_tif(im, polygon, pan_band, filepath_pan)
os.rename(local_data_pan, os.path.join(filepath_pan, filename_pan))
local_data_ms = download_tif(im, rect_narra, ms_bands, filepath_ms)
local_data_ms = download_tif(im, polygon, ms_bands, filepath_ms)
os.rename(local_data_ms, os.path.join(filepath_ms, filename_ms))
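A possible tidy-up for the polygon switching above: keep one entry per site and select it by name instead of commenting blocks in and out. A minimal sketch (coordinates copied from the blocks above; the dict structure itself is only illustrative):

# one polygon per site, selected by sitename
SITES = {
    'NARRA':  [[[151.301454, -33.700754], [151.311453, -33.702075],
                [151.307237, -33.739761], [151.294220, -33.736329],
                [151.301454, -33.700754]]],
    'OLDBAR': [[[152.664508, -31.896163], [152.665827, -31.897112],
                [152.631516, -31.924846], [152.629285, -31.923362],
                [152.664508, -31.896163]]],
}
sitename = 'OLDBAR'
polygon = SITES[sitename]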

@ -0,0 +1,106 @@
# -*- coding: utf-8 -*-
#==========================================================#
# Draw reference points on satellite image
#==========================================================#
# Preamble
import os
import ee
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import pandas as pd
from datetime import datetime
import pickle
import pdb
import pytz
from pylab import ginput
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure
# my functions
import functions.utils as utils
import functions.sds as sds
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
ee.Initialize()
# collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach)
#polygon = [[[151.301454, -33.700754],
# [151.311453, -33.702075],
# [151.307237, -33.739761],
# [151.294220, -33.736329],
# [151.301454, -33.700754]]];
# location (Oldbar shoreline)
#polygon = [[[152.664508, -31.896163],
# [152.665827, -31.897112],
# [152.631516, -31.924846],
# [152.629285, -31.923362],
# [152.664508, -31.896163]]];
# location (Oldbar inlet)
polygon = [[[152.676283, -31.866784],
[152.709174, -31.869993],
[152.678229, -31.892082],
[152.670366, -31.886360],
[152.676283, -31.866784]]];
# dates
start_date = '2017-01-30'
end_date = '2017-02-02'
#start_date = '2017-01-30'
#end_date = '2018-02-02'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon)).filterDate(start_date, end_date)
n_img = flt_col.size().getInfo()
print('Number of images covering the area:', n_img)
im_all = flt_col.getInfo().get('features')
satname = 'L8'
#sitename = 'NARRA'
sitename = 'OLDBAR_inlet'
# parameters
plot_bool = False # if you want the plots
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
cloud_threshold = 0.8
# find image in ee database
im = ee.Image(im_all[0].get('id'))
# load image as np.array
im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, polygon, satname, plot_bool)
# rescale intensities
im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)
# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)
plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
plt.show()
pts = ginput(n=50, timeout=1000, show_clicks=True)
points = np.array(pts)
plt.plot(points[:,0], points[:,1], 'ko')
plt.show()
pts_coords = sds.convert_pix2world(points[:,[1,0]], crs['crs_15m'])
pts = sds.convert_epsg(pts_coords, crs['epsg_code'], output_epsg)
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_refpoints2.pkl'), 'wb') as f:
pickle.dump(pts, f)
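The pickled reference points are read back the same way in the later scripts (usage sketch):

with open(os.path.join(filepath, sitename + '_refpoints2.pkl'), 'rb') as f:
    refpoints = pickle.load(f)   # Nx2 array of coordinates in output_epsg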

@ -110,7 +110,7 @@ def create_cloud_mask(im_qa, satname, plot_bool):
cloud_mask = np.isin(im_qa, cloud_values)
# remove isolated cloud pixels (there are some in the swash and they cause problems)
if sum(sum(cloud_mask)) > 0:
morphology.remove_small_objects(cloud_mask, min_size=5, connectivity=8, in_place=True)
morphology.remove_small_objects(cloud_mask, min_size=10, connectivity=1, in_place=True)
if plot_bool:
plt.figure()
@ -122,7 +122,7 @@ def create_cloud_mask(im_qa, satname, plot_bool):
return cloud_mask
def read_eeimage(im, polygon, plot_bool):
def read_eeimage(im, polygon, sat_name, plot_bool):
"""
Read an ee.Image() object and returns the panchromatic band, multispectral bands (B, G, R, NIR, SWIR)
and a cloud mask. All outputs are at 15m resolution (bilinear interpolation for the multispectral bands)
@ -177,7 +177,7 @@ def read_eeimage(im, polygon, plot_bool):
qa_band = [im_bands[11]]
im_qa, crs_qa = load_image(im, polygon, qa_band)
im_qa = im_qa[:,:,0]
im_cloud = create_cloud_mask(im_qa)
im_cloud = create_cloud_mask(im_qa, sat_name, plot_bool)
im_cloud = transform.resize(im_cloud, (im_pan.shape[0], im_pan.shape[1]),
order=0, preserve_range=True, mode='constant').astype('bool_')
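create_cloud_mask above masks pixels via np.isin(im_qa, cloud_values), and the new satname argument suggests the QA values are chosen per mission. A sketch of what that selection could look like; the QA codes below are illustrative assumptions, not values taken from this commit:

def select_cloud_values(satname):
    # illustrative QA codes only -- check the collection's BQA documentation
    if satname == 'L8':
        return [2800, 2804, 2808, 2812]   # assumed Collection 1 BQA cloud codes
    elif satname in ('L5', 'L7'):
        return [752, 756, 760, 764]       # assumed values for the older missions
    raise ValueError('unknown satellite: ' + satname)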

@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 5 16:19:31 2018
@author: z5030440
"""
import numpy as np
import matplotlib.pyplot as plt
import scipy.io as sio

# sl_gt and sl_sds are assumed to already exist in the workspace
# (ground-truth and satellite-derived shorelines from the comparison script)
d_gt = {'arr':sl_gt}
d_sds = {'arr':sl_sds}
sio.savemat('sl_gt.mat', mdict=d_gt)
sio.savemat('sl_sds.mat', mdict=d_sds)
#%%
herror = sio.loadmat('hor_error.mat')
diff_p = (herror['gt_av'] - herror['sds_av'])[0,:]
n = len(herror['p_rmse'][0])   # number of shoreline comparisons
orange = [255/255, 140/255, 0]
f = plt.figure()
plt.subplot(3,1,1)
plt.bar(np.linspace(1,n,n), herror['p_rmse'][0])
plt.ylabel('rmse [m]')
plt.xticks([])
plt.title('Horizontal cross-shore error')
plt.subplot(3,1,2)
plt.bar(np.linspace(1,n,n), herror['p_mean'][0], color=orange)
plt.ylabel('mean [m]')
plt.xticks([])
plt.subplot(3,1,3)
plt.bar(np.linspace(1,n,n), herror['p_std'][0], color='g')
plt.ylabel('std [m]')
plt.xlabel('comparison #')
plt.grid(False)
plt.grid(axis='y')
f.subplots_adjust(hspace=0.2)
plt.draw()

@ -0,0 +1,65 @@
close all
clear
clc
addpath(genpath('C:\Users\z5030440\Documents\GitHub\geetools\functions\xyz2spz'))
sl_gt = load('sl_gt.mat')
sl_sds = load('sl_sds.mat')
sl_sds = sl_sds.arr
sl_gt = sl_gt.arr
for i = 1:length(sl_sds)
sds.x = sl_sds{i}(:,1)
sds.y = sl_sds{i}(:,2)
sds.z = zeros(length(sl_sds{i}(:,1)),1)
gt.x = sl_gt{i}(:,1)
gt.y = sl_gt{i}(:,2)
gt.z = zeros(length(sl_gt{i}(:,1)),1)
outsds = xyz2spz(sds,'NARRA')
outgt = xyz2spz(gt,'NARRA')
figure
hold on
grid on
box on
plot(outsds.s, outsds.p, 'b-', 'linewidth',2)
plot(outgt.s, outgt.p, 'r-', 'linewidth',2)
xlabel('s [m]')
ylabel('p [m]')
title('Horizontal comparison in spz coordinates')
legend({'SDS', 'contour at tide level'})
xq = nanmin(outsds.s):10:nanmax(outsds.s)
[gt_s idx_gt] = unique(outgt.s)
gt_p = outgt.p(idx_gt)
gt_p_int = interp1(gt_s, gt_p, xq)
[sds_s idx_sds] = unique(outsds.s)
sds_p = outsds.p(idx_sds)
sds_p_int = interp1(sds_s, sds_p, xq)
diff_p = sds_p_int - gt_p_int;
sds_av(i) = median(sds_p_int(~(sds_p_int > median(sds_p_int) + 2*std(sds_p_int) | sds_p_int < median(sds_p_int) - 2*std(sds_p_int))))
gt_p_int(isnan(gt_p_int)) = []
gt_av(i) = median(gt_p_int(~(gt_p_int > median(gt_p_int) + 2*std(gt_p_int) | gt_p_int < median(gt_p_int) - 2*std(gt_p_int))))
idx_nan = isnan(diff_p)
diff_p(idx_nan) = []
xq(idx_nan) = []
idx_outlier = diff_p > median(diff_p) + 2*std(diff_p) | diff_p < median(diff_p) - 2*std(diff_p)
diff_p(idx_outlier) = []
xq(idx_outlier) = []
p_rmse(i) = sqrt(mean(diff_p.^2))
p_mean(i) = mean(diff_p)
p_std(i) = std(diff_p)
p_q90(i) = quantile(abs(diff_p), 0.9)
end
clearvars -except sds_av gt_av p_rmse p_mean p_std p_q90
save 'hor_error.mat'

@ -0,0 +1,213 @@
function [res,fval,it] = muller (f,Z0,itmax,ztol,ftol,option)
% MULLER find a zero of a real or complex function Y = F(Z)
%
% Syntax:
%
% RES = MULLER (F,Z0) finds the zero of a complex or real function
% 'F' (either an anonymous function or .m function) using three initial
% guesses contained in the vector Z0. Muller takes the function F and
% evaluates it at each initial point using feval. F doesn't need to be
% vectorized.
% The initial guesses can be real or complex numbers close to the zero,
% bracketing the zero is not necessary. Parameters ITMAX, ZTOL and
% FTOL are set by default to 1000, 1e-5 and 1e-5, respectively.
%
% RES = MULLER (F,Z0,ITMAX) the maximum number of iterations is set
% equal to ITMAX. ZTOL and FTOL are set by default with the values mentioned
% above.
%
% RES = MULLER (F,Z0,ITMAX,ZTOL) ZTOL is used as a stopping
% criterion. If the absolute difference between the values of Z found in
% the two latest iterations is less than ZTOL, the program is stopped. FTOL
% is set by default with the value mentioned above.
%
% RES = MULLER (F,Z0,ITMAX,ZTOL,FTOL) FTOL is used as a stopping
% criterion. If the value of the function F at the Z found in the last
% iteration is less than FTOL, the program is stopped.
%
% RES = MULLER (F,Z0,ITMAX,ZTOL,FTOL,'both') indicates that both
% criteria ZTOL and FTOL must be satisfied simultaneously. By default,
% MULLER stops if one of the two criteria is fulfilled.
%
% [RES,FVAL] = MULLER (F,Z0,...) returns the value of the function
% F at the Z found in the last iteration.
%
% [RES,FVAL,IT] = MULLER (F,Z0,...) returns the number of iterations
% used to find the zero.
%
% Example 1:
% myf = @(x) (x-1)^3;
%
% muller(myf,[0 0.1 0.2],[],[],[],'both')
% ans =
% 1.0000 + 0.0000i
%
% Example 2:
%
% [res,fval,it] = muller('cosh',[0 0.1 0.2],[],[],[],'both')
%
% res =
% 0.0000 + 1.5708i
%
% fval =
% 5.5845e-012 + 3.0132e-012i
%
% it =
% 5
%
% Method taken from:
% Numerical Recipes: The art of scientific computing
% W.H. Press; B.P. Flannery; S.A. Teukolsky; W.T. Vetterling
% 1986
%
% Thanks to John D'Errico for his helpful review.
%
% Written by Daniel H. Cortes
% MAE Department, West Virginia University
% March, 2008.
%
%=================================================
% Checking proper values of the input parameters
%=================================================
if nargin > 6
error ('Too many arguments.')
elseif nargin < 2
error('Too few arguments.')
end
if nargin < 6
opt = 1;
elseif ischar(option) == 1
if size(option,2) == 4
if sum(option == 'both') == 4
opt = 2;
else
error ('Option parameter must be *both*.')
end
else
error ('Option parameter must be *both*.')
end
else
error ('Option parameter must be a character array (string).')
end
if nargin < 5
ftol = 1e-5;
elseif isnumeric(ftol) ~= 1
error ('FTOL must be a numeric argument.')
elseif isempty(ftol) == 1
ftol = 1e-5;
elseif size(ftol,1) ~= 1 || size(ftol,2) ~= 1
error ('FTOL cannot be an array')
end
if nargin < 4
ztol = 1e-5;
elseif isnumeric(ztol) ~= 1
error ('ZTOL must be a numeric argument.')
elseif isempty(ztol) == 1
ztol = 1e-5;
elseif size(ztol,1) ~= 1 || size(ztol,2) ~= 1
error ('ZTOL cannot be an array.')
end
if nargin < 3
itmax = 1000;
elseif isnumeric(itmax) ~= 1
error ('ITMAX must be a numeric argument.')
elseif isempty(itmax) == 1
itmax = 1000;
elseif size(itmax,1) ~= 1 || size(itmax,2) ~= 1
error ('ITMAX cannot be an array.')
end
if isnumeric(Z0) ~= 1
error ('Z0 must be a vector of three numeric arguments.')
elseif isempty(Z0) == 1 || length(Z0) ~= 3 || min(size(Z0)) ~= 1
error ('Z0 must be a vector of length 3 of either complex or real arguments.')
end
if Z0(1)==Z0(2) || Z0(1)==Z0(3) || Z0(2)==Z0(3)
error('The initial guesses must be different')
end
%=============================
% Beginning of Muller's method
%=============================
z0 = Z0(1);
z1 = Z0(2);
z2 = Z0(3);
y0 = feval ( f, z0);
y1 = feval ( f, z1);
y2 = feval ( f, z2);
for it = 1:itmax
q = (z2 - z1)/(z1 - z0);
A = q*y2 - q*(1+q)*y1 + q^2*y0;
B = (2*q + 1)*y2 - (1 + q)^2*y1 + q^2*y0;
C = (1 + q)*y2;
if ( A ~= 0 )
disc = B^2 - 4*A*C;
den1 = ( B + sqrt ( disc ) );
den2 = ( B - sqrt ( disc ) );
if ( abs ( den1 ) < abs ( den2 ) )
z3 = z2 - (z2 - z1)*(2*C/den2);
else
z3 = z2 - (z2 - z1)*(2*C/den1);
end
elseif ( B ~= 0 )
z3 = z2 - (z2 - z1)*(2*C/B);
else
warning('Muller Method failed to find a root. Last iteration result used as an output. Result may not be accurate')
res = z2;
fval = y2;
return
end
y3 = feval ( f, z3);
if opt == 1
if ( abs (z3 - z2) < ztol || abs ( y3 ) < ftol )
res = z3;
fval = y3;
return
end
else
if ( abs (z3 - z2) < ztol && abs ( y3 ) < ftol )
res = z3;
fval = y3;
return
end
end
z0 = z1;
z1 = z2;
z2 = z3;
y0 = y1;
y1 = y2;
y2 = y3;
end
res = z2;
fval = y2;
%warning('Maximum number of iterations reached. Result may not be accurate')
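For reference, the core Muller iteration above condenses to a few lines of Python. A minimal sketch (assumes three distinct guesses; omits the input validation and the A == 0 / B == 0 fallbacks of the MATLAB version):

import cmath

def muller_sketch(f, z0, z1, z2, itmax=1000, ztol=1e-5, ftol=1e-5):
    # find a root of f from three distinct guesses; may return a complex root
    y0, y1, y2 = f(z0), f(z1), f(z2)
    for _ in range(itmax):
        q = (z2 - z1) / (z1 - z0)
        A = q*y2 - q*(1 + q)*y1 + q**2*y0
        B = (2*q + 1)*y2 - (1 + q)**2*y1 + q**2*y0
        C = (1 + q)*y2
        disc = cmath.sqrt(B**2 - 4*A*C)
        # take the denominator with the larger magnitude for stability
        den = B + disc if abs(B + disc) >= abs(B - disc) else B - disc
        z3 = z2 - (z2 - z1) * (2*C / den)
        y3 = f(z3)
        if abs(z3 - z2) < ztol or abs(y3) < ftol:
            return z3
        z0, z1, z2, y0, y1, y2 = z1, z2, z3, y1, y2, y3
    return z2

# muller_sketch(lambda x: (x - 1)**3, 0, 0.1, 0.2)  ->  approximately 1.0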

@ -0,0 +1,87 @@
function out = sort_back( data, ind, dim )
% SORT_BACK sort back data to original order
% ind is the index array returned by SORT
% dim is the sorted dimension of the data (assumed to be 1 if not specified)
% Ex:
% y = randn(3,4,2);
% [y,ind] = sort(y,2);
% do stuff with sorted y...
% y2 = sort_back( y, ind, 2 );
%
% Works on arrays of any dimension
% Also works on cellstrings (vectors)
%
% C = {'hello' 'yes' 'no' 'goodbye'};
% [C,ind] = sort(C);
% C2 = sort_back(C,ind);
%
% See also SORT
%Author Ivar Eskerud Smith
if ~isequal(size(ind),size(data))
error('Indices and input data must have the same size');
end
if iscell(data)
if ~any(size(data)==1)
error('Only vectors are supported in cell sorting/back-sorting');
end
out=cell(size(data));
out(ind) = data;
return;
end
if ~isnumeric(data) || ~isnumeric(ind)
error('Inputs have to be numeric or cell');
end
n=ndims(ind);
if ~exist('dim','var')
dim=1;
end
if dim>n
error('Specified sorted dimension must be within array bounds');
end
%shift array so that the sorted dim is the first dimension
if dim~=1
sortInd=1:1:n;sortInd(1)=dim;sortInd(dim)=1;
data = permute(data,sortInd);
ind = permute(ind,sortInd);
end
inds = repmat({1},1,n);inds{1}=':';
if ~issorted( data(inds{:}) )
warning('The input data is not sorted along the specified dimension');
end
s = size(ind);
nData = numel(data);
inds = repmat({1},1,n);
inds(1:2)={':',':'};
shiftSize = s(1)*s(2);
out=nan(size(data));
%loop all 2d arrays within nd-array
for k=1:prod(s(3:end))
tmpdata = data(inds{:});
tmpind = ind(inds{:});
%data is shifted so that the sorted dim = 1
for i=1:numel(tmpdata(1,:))
out(tmpind(:,i),i) = tmpdata(:,i);
end
if n>2
%shift to next 2d array within nd-array
shiftInds = mod((1:nData)-shiftSize-1,nData)+1;
out=reshape(out(shiftInds),s);
data=reshape(data(shiftInds),s);
ind=reshape(ind(shiftInds),s);
end
end
%permute back to original order
sortInd=1:1:ndims(out);sortInd(1)=dim;sortInd(dim)=1;
out = permute(out,sortInd);
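For NumPy arrays the same back-sorting needs no loop: argsort of the sort indices gives the inverse permutation. A sketch of the equivalent round trip:

import numpy as np

y = np.random.randn(3, 4, 2)
ind = np.argsort(y, axis=1)                      # like [y, ind] = sort(y, 2)
y_sorted = np.take_along_axis(y, ind, axis=1)
# ... work with y_sorted ...
y_back = np.take_along_axis(y_sorted, np.argsort(ind, axis=1), axis=1)
assert np.allclose(y_back, y)                    # original order restored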

@ -0,0 +1,117 @@
function out = xyz2spz(xyz_data,site)
%function out = xyz2spz(xyz_data,site)
%
%Function to transform (x,y,z) coordinates on an embayed beach to alongshore - cross-shore
%coordinates (s,p,z) using the log spiral, given by the equation
%r = r0*exp(A*theta). A = cot(alpha).
%
%xyz_data is a structure containing:
%
%xyz_data.x
%xyz_data.y
%xyz_data.z
%
%site is the name of the structure generated from the MALT graphical user interface
%
%Refer to paper
%
%Harley, M.D. and Turner,I.L. (2007) A simple data transformation technique
%for pre-processing survey data at embayed beaches, Coast. Eng.,
%doi:10.1016/j.coastaleng.2007.07.001, in press.
%
%Created by Mitch Harley
%8th August, 2005
%Last Modified 4th April, 2012
%----------------------------------------------------------------
%LOAD LOGSPIRAL-FIT PARAMETERS
eval(['load ' site ';'])
eval(['site = ' site ';'])
%Define origin and A of log spiral
origin = site.origin;
alph = site.alpha;
A = cot(alph*pi/180);
r0_origin = site.r0_origin;
%-----------------------------------------------------------------
%DO TRANSFORMATION
%Points need to be sorted prior to analysis %MDH 4/4/2012
aa = [xyz_data.x xyz_data.y xyz_data.z];
[sorted_points,Isort] = sortrows(aa);
%Convert xyz coordinates to polar coordinates
r = sqrt((sorted_points(:,1) - origin(1)).^2+(sorted_points(:,2) - origin(2)).^2);
theta = unwrap(atan2((sorted_points(:,2)-origin(2)),(sorted_points(:,1)-origin(1))) );
%Find constants delta and kappa
delta = pi/2+acot(A)-theta; %From Equation 5
kappa = r./(r0_origin*sin(pi/2-acot(A))); %From Equation 6
%Find theta_s by solving implicitly using fzero function
for i = 1:length(theta)
%Use muller function in case any complex solutions
theta_s(i,1) = muller(@(x) (x-(1/A)*log(kappa(i)*sin(delta(i)+x))),[theta(i)-pi/8 theta(i) theta(i)+pi/8]);%From Equation 6
end
%plot(theta_s*180/pi)
%Find r_s
r_s = r0_origin*exp(A*theta_s);%From Equation 1
%Find s
lamda = r0_origin*sec(acot(A));%From Equation 8
start_point = 0; %Can be changed to make a more suitable start point
s = lamda*(exp(A*theta_s)-exp(A*start_point));%From Equation 8
%Find p
p = r.*sin(theta-theta_s)./sin(pi/2-acot(A)); %From Equation 9
%Convert any complex numbers to real numbers
p = real(p);
s = real(s);
%Sort back points to get the right indices %MDH 4/4/2012
p = sort_back(p,Isort);
s = sort_back(s,Isort);
%-----------------------------------------------------------------
%POST-PROCESS DATA
%s data
if site.reverse_s ==0
s = s - site.startpoint;%Make minimum s == 0
elseif site.reverse_s ==1
s = -(s - site.startpoint);
end
%p data
if site.subtract_res ==1 %Add switch for user to subtract residuals or not - MDH 19/5/2010
[MIN,L] = min(site.boundary.s);
I = find(s<=MIN);
p(I) = p(I) - site.boundary.p(L);
[MAX,L] = max(site.boundary.s);
I = find(s>=MAX);
p(I) = p(I) - site.boundary.p(L);
I = find(s>MIN&s<MAX);
p(I) = p(I) - interp1(site.boundary.s,site.boundary.p,s(I));%Subtract logspiral errors from p data
end
if site.alpha<0
p = -p;
end
%-----------------------------------------------------------------
out.s = s;
out.p = p;
out.z = xyz_data.z;
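In Python terms the transform above reduces to: polar coordinates about the spiral origin, an implicit solve of Equation 6 for theta_s, then Equations 8 and 9. A sketch with scipy's brentq standing in for muller (assumes alpha > 0, real roots, and a sign change inside the bracket; not a replacement for the full function):

import numpy as np
from scipy.optimize import brentq

def xyz2spz_sketch(x, y, origin, alpha_deg, r0):
    acotA = np.radians(alpha_deg)                    # acot(A) = alpha in radians
    A = 1.0 / np.tan(acotA)                          # A = cot(alpha)
    r = np.hypot(x - origin[0], y - origin[1])
    theta = np.unwrap(np.arctan2(y - origin[1], x - origin[0]))
    delta = np.pi/2 + acotA - theta                  # Equation 5
    kappa = r / (r0 * np.sin(np.pi/2 - acotA))       # Equation 6
    theta_s = np.array([brentq(lambda t: t - (1/A)*np.log(k*np.sin(d + t)),
                               th - np.pi/8, th + np.pi/8)
                        for th, d, k in zip(theta, delta, kappa)])
    s = r0/np.cos(acotA) * (np.exp(A*theta_s) - 1.0) # Equation 8, start point 0
    p = r * np.sin(theta - theta_s) / np.sin(np.pi/2 - acotA)  # Equation 9
    return s, p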

@ -0,0 +1,166 @@
# -*- coding: utf-8 -*-
#==========================================================#
# Make a gif of the satellite images
#==========================================================#
# Initial settings
import os
import numpy as np
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import matplotlib.animation as manimation
import ee
import pdb
# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
import imageio
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
# import own modules
import functions.utils as utils
import functions.sds as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()
# parameters
cloud_thresh = 0.5 # threshold for cloud cover
plot_bool = False # if you want the plots
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA'
sitename = 'OLDBAR_inlet'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps) # sort timestamps since images are sorted in directory
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints2' + '.pkl'), 'rb') as f:
refpoints = pickle.load(f)
# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)
# initialise some variables
cloud_cover_ts = []
date_acquired_ts = []
idx_skipped = []
idx_nocloud = []
t = []
shorelines = []
with open(os.path.join(filepath, sitename + '_idxnocloud' + '.pkl'), 'rb') as f:
idx_nocloud = pickle.load(f)
for i in idx_nocloud:
# read pan image
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]
cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
order=0, preserve_range=True,
mode='constant').astype('bool_')
# resize the image using bilinear interpolation (order 1)
im_ms = transform.resize(im_ms,(im_pan.shape[0], im_pan.shape[1]),
order=1, preserve_range=True, mode='constant')
# check if -inf or nan values and add to cloud mask
im_inf = np.isin(im_ms[:,:,0], -np.inf)
im_nan = np.isnan(im_ms[:,:,0])
cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
if cloud_cover > cloud_thresh:
print('skipped cloud ' + str(i))
idx_skipped.append(i)
continue
# idx_nocloud.append(i)
# check if an image for that date is already present and keep the one with less cloud cover
date_str = file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10]
if date_str in date_acquired_ts:
idx_samedate = utils.find_indices(date_acquired_ts, lambda e : e == date_str)
idx_samedate = idx_samedate[0]
print(str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
if cloud_cover >= cloud_cover_ts[idx_samedate]:
print('skipped double ' + str(i))
idx_skipped.append(i)
continue
else:
del shorelines[idx_samedate]
del t[idx_samedate]
del cloud_cover_ts[idx_samedate]
del date_acquired_ts[idx_samedate]
print('deleted ' + str(idx_samedate))
# rescale intensities
im_ms = sds.rescale_image_intensity(im_ms, cloud_mask, prob_high, plot_bool)
im_pan = sds.rescale_image_intensity(im_pan, cloud_mask, prob_high, plot_bool)
# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
# add down-sized bands for NIR and SWIR (since pansharpening is not possible)
im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
# calculate NDWI
im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
# detect edges
wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)
# convert from pixels to world coordinates
wl_coords = sds.convert_pix2world(wl_pix, georef)
# convert to output epsg spatial reference
wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)
# save images as png for video
fig = plt.figure()
plt.grid(False)
plt.imshow(im_ms_ps[:,:,[2,1,0]], animated=True)
# (no interactive window to maximize under the Agg backend selected above)
plt.title(date_str)
plt.xticks([])
plt.yticks([])
plt.axis('equal')
plt.tight_layout()
plt.draw()
plt.savefig(os.path.join(filepath, 'plots', date_str + '.png'), dpi=300)
plt.close()
# create gif
images = []
filenames = sorted(os.listdir(os.path.join(filepath, 'plots'))) # sort frames so the gif runs in date order
with imageio.get_writer('movie.gif', mode='I', duration=0.2) as writer:
for filename in filenames:
image = imageio.imread(os.path.join(filepath,'plots',filename))
writer.append_data(image)

File diff suppressed because one or more lines are too long (3 files).

@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee
import matplotlib.dates as mdates
import matplotlib.cm as cm
from datetime import datetime, timedelta
import pickle
import pytz
import scipy.io as sio
import scipy.interpolate as interpolate
import statsmodels.api as sm
import skimage.measure as measure
# my functions
import functions.utils as utils
import functions.sds as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
au_tz = pytz.timezone('Australia/Sydney')
# load timestamps from satellite images
satname = 'L8'
sitename = 'OLDBAR'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'rb') as f:
output = pickle.load(f)
dates_l8 = output['t']
# convert to AEST
dates_l8 = [_.astimezone(au_tz) for _ in dates_l8]
# get the satellite shorelines
sl = output['shorelines']
# load oldbar beach points (manually digitized); variable name kept from the Narrabeen script
with open(os.path.join(os.getcwd(), 'olddata', 'oldbar_beach' + '.pkl'), 'rb') as f:
narrabeach = pickle.load(f)
dist_thresh = 250
frac_smooth = 1./12
plt.figure()
plt.axis('equal')
for i in range(1):
# select the SDS points that are close to the manually digitized points
idx_beach = [np.min(np.linalg.norm(sl[i][k,:] - narrabeach, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
plt.plot(sl[i][:,0], sl[i][:,1])
plt.plot(sl[i][idx_beach,0], sl[i][idx_beach,1])
# smooth (LOWESS) satellite shoreline
sl_smooth = sm.nonparametric.lowess(sl[i][idx_beach,0],sl[i][idx_beach,1], frac=frac_smooth, it = 10)
sl_smooth = sl_smooth[:,[1,0]]
plt.plot(sl_smooth[:,0], sl_smooth[:,1])
plt.draw()
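The list comprehension above computes, for every SDS point, the minimum distance to every reference point; a KD-tree gives the same idx_beach mask much faster on long shorelines. A sketch:

from scipy.spatial import cKDTree

tree = cKDTree(narrabeach)           # reference beach points
dist, _ = tree.query(sl[i])          # distance to nearest reference point
idx_beach = dist < dist_thresh       # same boolean mask as the comprehension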

@ -36,7 +36,7 @@ ee.Initialize()
#%% Select images
# parameters
plot_bool = False # if you want the plots
plot_bool = True # if you want the plots
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
@ -44,23 +44,29 @@ cloud_threshold = 0.8
# select collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
satname = 'L8'
# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.3473129272461,-33.69035274454718],
[151.2820816040039,-33.68206818063878],
[151.27281188964844,-33.74775138989556],
[151.3425064086914,-33.75231878701767],
[151.3473129272461,-33.69035274454718]]];
#rect_narra = [[[151.3473129272461,-33.69035274454718],
# [151.2820816040039,-33.68206818063878],
# [151.27281188964844,-33.74775138989556],
# [151.3425064086914,-33.75231878701767],
# [151.3473129272461,-33.69035274454718]]];
#rect_narra = [[[151.301454, -33.700754],
# [151.311453, -33.702075],
# [151.307237, -33.739761],
# [151.294220, -33.736329],
# [151.301454, -33.700754]]];
# location (Oldbar NSW)
rect_narra = [[[152.578395, -31.841216],
[152.777281, -31.842523],
[152.738086, -32.028773],
[152.557812, -32.004663],
[152.578395, -31.841216]]];
# Dates
start_date = '2016-01-01'
end_date = '2016-12-31'
start_date = '2018-01-18'
end_date = '2018-01-20'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra)).filterDate(start_date, end_date)
@ -71,7 +77,7 @@ im_all = flt_col.getInfo().get('features')
# find each image in ee database
im = ee.Image(im_all[0].get('id'))
# load image as np.array
im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, rect_narra, plot_bool)
im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, rect_narra, satname, plot_bool)
# rescale intensities
im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
@ -84,7 +90,7 @@ plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
plt.show()
pts = ginput(15)
pts = ginput(n=20, timeout=1000, show_clicks=True)
points = np.array(pts)
plt.plot(points[:,0], points[:,1], 'ko')
plt.show()
@ -92,5 +98,13 @@ plt.show()
pts_coords = sds.convert_pix2world(points[:,[1,0]], crs['crs_15m'])
pts = sds.convert_epsg(pts_coords, crs['epsg_code'], output_epsg)
with open('data/narra_beach.pkl', 'wb') as f:
with open('olddata/oldbar_beach.pkl', 'wb') as f:
pickle.dump(pts, f)
#pts_wgs84 = sds.convert_epsg(pts_coords, crs['epsg_code'], 4326)
#
#import simplekml
#kml = simplekml.Kml()
#kml.newlinestring(name='test', coords=pts_wgs84)
#kml.save("test.kml")

Binary file not shown.

@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
#==========================================================#
# Process shorelines (clipping and smoothing)
#==========================================================#
# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee
import matplotlib.dates as mdates
import matplotlib.cm as cm
import matplotlib.colors as mcolor
from datetime import datetime, timedelta
import pickle
import pytz
import scipy.io as sio
import scipy.interpolate as interpolate
import statsmodels.api as sm
import skimage.measure as measure
import simplekml
# my functions
import functions.utils as utils
import functions.sds as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
au_tz = pytz.timezone('Australia/Sydney')
au_epsg = 28356
# load the satellite-derived shorelines
satname = 'L8'
sitename = 'OLDBAR'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'rb') as f:
output = pickle.load(f)
sl = output['shorelines']
dates_sl = output['t']
# convert to AEST
dates_sl = [_.astimezone(au_tz) for _ in dates_sl]
# load the reference shoreline points
with open(os.path.join(os.getcwd(), 'data', satname, sitename, sitename + '_refpoints.pkl'), 'rb') as f:
refpoints = pickle.load(f)
dist_thresh = 200
frac_smooth = 1./15
plt.figure()
plt.axis('equal')
cmap = cm.get_cmap('brg')
colours = cmap(np.linspace(0, 1, num=len(sl)))
kml = simplekml.Kml()
for i in range(len(sl)):
# select points of SDS that are close to the manually digitized points
idx_ref = [np.min(np.linalg.norm(sl[i][k,:] - refpoints, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
# smooth (LOWESS) satellite shoreline
sl_smooth = sm.nonparametric.lowess(sl[i][idx_ref,0],sl[i][idx_ref,1], frac=frac_smooth, it = 10)
sl_smooth = sl_smooth[:,[1,0]]
# sl_smooth = sl[i][idx_ref,:]
# plt.plot(sl[i][idx_ref,0],sl[i][idx_ref,1], 'k-')
plt.plot(sl_smooth[:,0], sl_smooth[:,1], color=colours[i,:], label=dates_sl[i].strftime('%d-%b-%Y'))
# convert to wgs84 (epsg = 4326)
sl_wgs84 = sds.convert_epsg(sl_smooth, 28356, 4326)
# save in kml file
ln = kml.newlinestring(name=dates_sl[i].strftime('%d-%b-%Y'))
ln.coords = sl_wgs84
ln.style.labelstyle.color = mcolor.rgb2hex(colours[i,:3])
ln.style.linestyle.color = mcolor.rgb2hex(colours[i,:3])
plt.legend(ncol=3)
plt.xlabel('Eastings [m]')
plt.ylabel('Northings [m]')
plt.title('Oldbar inlet (South)')
plt.draw()
kml.save(satname + sitename + '_shorelines.kml')
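If KML is not required, the same lines serialize naturally to GeoJSON with the standard library. A sketch, assuming a hypothetical list shorelines_wgs84 of per-date coordinate arrays collected in the loop above (the (lon, lat) ordering is also an assumption here):

import json

# shorelines_wgs84 is a hypothetical list of Nx2 (lon, lat) arrays
features = [{'type': 'Feature',
             'properties': {'date': dates_sl[i].strftime('%d-%b-%Y')},
             'geometry': {'type': 'LineString',
                          'coordinates': np.asarray(shorelines_wgs84[i])[:, :2].tolist()}}
            for i in range(len(shorelines_wgs84))]
with open(satname + sitename + '_shorelines.geojson', 'w') as f:
    json.dump({'type': 'FeatureCollection', 'features': features}, f)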

@ -1,9 +1,8 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 27 17:12:35 2018
@author: Kilian
"""
#==========================================================#
# Extract shorelines from Landsat images
#==========================================================#
# Initial settings
import os
@ -25,7 +24,6 @@ import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
# import own modules
import functions.utils as utils
import functions.sds as sds
@ -38,24 +36,27 @@ ee.Initialize()
# parameters
cloud_thresh = 0.5 # threshold for cloud cover
plot_bool = False # if you want the plots
plot_bool = False # if you want the plots
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
sitename = 'NARRA'
#sitename = 'NARRA'
sitename = 'OLDBAR'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
timestamps_sorted = sorted(timestamps) # sort timestamps since images are sorted in directory
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints' + '.pkl'), 'rb') as f:
refpoints = pickle.load(f)
# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', 'L8', 'NARRA', 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', 'L8', 'NARRA', 'ms')
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)
@ -64,7 +65,7 @@ N = len(file_names_pan)
cloud_cover_ts = []
date_acquired_ts = []
idx_skipped = []
idx_nocloud = []
t = []
shorelines = []
@ -98,8 +99,9 @@ for i in range(N):
print('skipped cloud ' + str(i))
idx_skipped.append(i)
continue
idx_nocloud.append(i)
# check if an image for that date is already present and keep the one with less cloud cover
if file_names_pan[i][9:19] in date_acquired_ts:
if file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] in date_acquired_ts:
idx_samedate = utils.find_indices(date_acquired_ts, lambda e : e == file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10])
idx_samedate = idx_samedate[0]
print(str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
@ -136,7 +138,7 @@ for i in range(N):
plt.subplot(121)
plt.imshow(im_ms_ps[:,:,[2,1,0]])
for j,contour in enumerate(wl_pix):
colours = cmap(np.linspace(0, 1, num=len(wl)))
colours = cmap(np.linspace(0, 1, num=len(wl_pix)))
plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color=colours[j,:])
plt.axis('image')
plt.title(file_names_pan[i])
@ -147,6 +149,7 @@ for i in range(N):
centroids.append([np.mean(contour[:, 0]),np.mean(contour[:, 1])])
plt.plot(contour[:, 0], contour[:, 1], linewidth=2, color=colours[j,:])
plt.plot(np.mean(contour[:, 0]), np.mean(contour[:, 1]), 'o', color=colours[j,:])
plt.plot(refpoints[:,0], refpoints[:,1], 'k.')
plt.axis('equal')
plt.title(file_names_pan[i])
mng = plt.get_current_fig_manager()
@ -154,27 +157,32 @@ for i in range(N):
plt.tight_layout()
plt.draw()
# click on the left image to discard, otherwise on the closest centroid in the right image
pt_in = np.array(ginput(1))
pt_in = np.array(ginput(n=1, timeout=1000))
if pt_in[0][0] < 10000:
print('skipped manual ' + str(i))
idx_skipped.append(i)
continue
# get the contour that was selected (the one whose centroid is closest to the click)
dist_centroid = [np.linalg.norm(_ - pt_in) for _ in centroids]
shorelines.append(wl[np.argmin(dist_centroid)])
t.append(timestamps_sorted[i])
cloud_cover_ts.append(cloud_cover)
date_acquired_ts.append(file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10])
#plt.figure()
#plt.axis('equal')
#for j in range(len(shorelines)):
# plt.plot(shorelines[j][:,0], shorelines[j][:,1])
#plt.draw()
output = {'t':t, 'shorelines':shorelines}
output = {'t':t, 'shorelines':shorelines, 'cloud_cover':cloud_cover_ts}
with open(os.path.join(filepath, sitename + '_output2' + '.pkl'), 'wb') as f:
with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'wb') as f:
pickle.dump(output, f)
with open(os.path.join(filepath, sitename + '_skipped' + '.pkl'), 'wb') as f:
pickle.dump(idx_skipped, f)
with open(os.path.join(filepath, sitename + '_idxnocloud' + '.pkl'), 'wb') as f:
pickle.dump(idx_nocloud, f)
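The duplicate-date bookkeeping above (parallel lists pruned with del) can also be expressed with a dict keyed by acquisition date; a sketch of the same keep-the-least-cloudy rule, with candidates standing in for the per-image values computed in the loop (a hypothetical iterable):

best = {}   # date string -> (cloud_cover, shoreline, timestamp)
for date_str, cc, shoreline, ts in candidates:   # hypothetical iterable
    if date_str not in best or cc < best[date_str][0]:
        best[date_str] = (cc, shoreline, ts)
shorelines = [v[1] for v in best.values()]
t = [v[2] for v in best.values()]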

@ -1,9 +1,10 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 20 16:15:51 2018
@author: z5030440
"""
#==========================================================#
# Compare Narrabeen SDS with 3D quadbike surveys
#==========================================================#
# Initial settings
import os
import numpy as np
@ -25,6 +26,7 @@ import skimage.measure as measure
import functions.utils as utils
import functions.sds as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
@ -48,7 +50,7 @@ dates_l8 = output['t']
# convert to AEST
dates_l8 = [_.astimezone(au_tz) for _ in dates_l8]
# load wave data
# load wave data (already AEST)
filename = os.path.join('data', 'wave', 'SydneyProcessed.mat')
filepath = os.path.join(os.getcwd(), filename)
wave_data = sio.loadmat(filepath)
@ -60,7 +62,7 @@ dates_wave = [datetime(wave_data['dates'][i,0], wave_data['dates'][i,1],
wave_data['dates'][i,4], wave_data['dates'][i,5],
tzinfo=au_tz) for i in idx]
# load tide data
# load tide data (already AEST)
filename = 'SydTideData.mat'
filepath = os.path.join(os.getcwd(), 'data', 'tide', filename)
tide_data = sio.loadmat(filepath)
@ -71,7 +73,7 @@ dates_tide = [datetime(tide_data['dates'][i,0], tide_data['dates'][i,1],
tide_data['dates'][i,4], tide_data['dates'][i,5],
tzinfo=au_tz) for i in idx]
#%% make a plot of all the dates
#%% make a plot of all the dates with wave data
orange = [255/255,140/255,0]
blue = [0,191/255,255/255]
f = plt.figure()
@ -110,11 +112,14 @@ for k in range(len(years)):
ax.xaxis.set_major_formatter(month_fmt)
f.subplots_adjust(hspace=0.2)
plt.draw()
#%% calculate days difference
#%% calculate difference between dates (quad and sat)
diff_days = [ [(x - _).days for _ in dates_quad] for x in dates_l8]
max_diff = 10
idx_closest = [utils.find_indices(_, lambda e: abs(e) <= max_diff) for _ in diff_days]
# store in the dates_diff dictionary
dates_diff = []
cloud_cover = []
for i in range(len(idx_closest)):
if not idx_closest[i]:
continue
@ -130,6 +135,13 @@ for i in range(len(idx_closest)):
"date quad": dates_quad[idx_closest[i][0]],
"days diff": diff_days[i][idx_closest[i][0]]
})
# store cloud data
cloud_cover.append(output['cloud_cover'][i])
# store wave data
wave_hsig = []
for i in range(len(dates_diff)):
wave_hsig.append(hsig[np.argmin(np.abs(np.array([(dates_diff[i]['date sat'] - _).total_seconds() for _ in dates_wave])))])
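Each of these argmin calls scans the whole wave record; since the record is time-sorted, np.searchsorted can find the nearest sample directly. A sketch (assumes dates_wave is sorted and timezone-aware):

tw = np.array([_.timestamp() for _ in dates_wave])     # POSIX seconds
wave_hsig = []
for d in dates_diff:
    ts = d['date sat'].timestamp()
    j = int(np.clip(np.searchsorted(tw, ts), 1, len(tw) - 1))
    j = j - 1 if ts - tw[j - 1] <= tw[j] - ts else j   # pick nearer neighbour
    wave_hsig.append(hsig[j])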
# make a plot
plt.figure()
@ -165,48 +177,61 @@ for i in range(len(dates_diff)):
plt.draw()
plt.gcf().subplots_adjust(hspace=0.5)
# mean day difference
np.mean([ np.abs(_['days diff']) for _ in dates_diff])
#%% compare shorelines
dist_thresh = 200 # maximum distance between an sds point and a narrabeen point
#%% Compare shorelines in elevation
dist_thresh = 200 # maximum distance between an sds point and a narrabeen point
frac_smooth = 1./10 # fraction of the data used for smoothing (the bigger the smoother)
dist_buffer = 50 # buffer of points selected for interpolation
dist_buffer = 50 # buffer of points selected for interpolation
# load quadbike .mat files
foldername = os.path.join('data', 'quadbike', 'surveys3D')
folderpath = os.path.join(os.getcwd(), foldername)
filenames = os.listdir(folderpath)
# load the satellite shorelines
# get the satellite shorelines
sl = output['shorelines']
# load narrabeen beach points (manually digitized)
with open(os.path.join(os.getcwd(), 'olddata', 'narra_beach' + '.pkl'), 'rb') as f:
narrabeach = pickle.load(f)
# get dates from filenames
dates_quad = [datetime(int(_[6:10]), int(_[11:13]), int(_[14:16]), tzinfo= au_tz) for _ in filenames]
zav = []
ztide = []
sl_gt = []
sl_sds = []
for i in range(len(dates_diff)):
# select closest 3D survey
# select closest 3D survey and load .mat file
idx_closest = np.argmin(np.abs(np.array([(dates_diff[i]['date sat'] - _).days for _ in dates_quad])))
survey3d = sio.loadmat(os.path.join(folderpath, filenames[idx_closest]))
# reshape to a vector
xs = survey3d['x'].reshape(survey3d['x'].shape[0] * survey3d['x'].shape[1])
ys = survey3d['y'].reshape(survey3d['y'].shape[0] * survey3d['y'].shape[1])
zs = survey3d['z'].reshape(survey3d['z'].shape[0] * survey3d['z'].shape[1])
# remove nan values
idx_nan = np.isnan(zs)
xs = xs[~idx_nan]
ys = ys[~idx_nan]
zs = zs[~idx_nan]
# smooth (LOWESS) satellite shoreline
# select the SDS points that are close to the manually digitized points
idx_beach = [np.min(np.linalg.norm(sl[i][k,:] - narrabeach, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
# smooth (LOWESS) satellite shoreline
sl_smooth = sm.nonparametric.lowess(sl[i][idx_beach,0],sl[i][idx_beach,1], frac=frac_smooth, it = 6)
sl_smooth = sl_smooth[:,[1,0]]
sl_sds.append(sl_smooth)
# find water level at the time the image was acquired
idx_closest = np.argmin(np.abs(np.array([(dates_diff[i]['date sat'] - _).total_seconds() for _ in dates_tide])))
tide_level = tide[idx_closest]
ztide.append(tide_level)
# find contour corresponding to the water level
# find contour corresponding to the water level on 3D surface (if below minimum, add 0.05m increments)
if tide_level < np.nanmin(survey3d['z']):
tide_level = np.nanmin(survey3d['z'])
sl_tide = measure.find_contours(survey3d['z'], tide_level)
@ -217,16 +242,19 @@ for i in range(len(dates_diff)):
tide_level = tide_level + 0.05*count
sl_tide = measure.find_contours(survey3d['z'], tide_level)
sl_tide = sl_tide[np.argmax(np.array([len(_) for _ in sl_tide]))]
print(str(0.05*count) + ' - ' + str(len(sl_tide)))
print('added ' + str(0.05*count) + ' m - contour with ' + str(len(sl_tide)) + ' points')
else:
sl_tide = measure.find_contours(survey3d['z'], tide_level)
sl_tide = sl_tide[np.argmax(np.array([len(_) for _ in sl_tide]))]
# remove nans
if np.any(np.isnan(sl_tide)):
index_nan = np.where(np.isnan(sl_tide))[0]
sl_tide = np.delete(sl_tide, index_nan, axis=0)
# get x,y coordinates
xtide = [survey3d['x'][int(np.round(sl_tide[m,0])), int(np.round(sl_tide[m,1]))] for m in range(sl_tide.shape[0])]
ytide = [survey3d['y'][int(np.round(sl_tide[m,0])), int(np.round(sl_tide[m,1]))] for m in range(sl_tide.shape[0])]
# interpolate SDS on 3D surface to get elevation
sl_gt.append(np.transpose(np.array([np.array(xtide), np.array(ytide)])))
# interpolate SDS on 3D surface to get elevation (point by point)
zq = np.zeros((sl_smooth.shape[0], 1))
for j in range(sl_smooth.shape[0]):
xq = sl_smooth[j,0]
@ -243,7 +271,9 @@ for i in range(len(dates_diff)):
# plt.plot(xq,yq,'ro')
# plt.draw()
# store the alongshore median elevation
zav.append(np.median(utils.reject_outliers(zq, m=2)))
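reject_outliers comes from the author's utils module; a typical implementation of this kind of filter, assuming it keeps the values within m standard deviations of the median, is:

def reject_outliers(data, m=2):
    # assumed behaviour: keep values within m std deviations of the median
    data = np.asarray(data).ravel()
    return data[np.abs(data - np.median(data)) < m * np.std(data)]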
# make plot
red = [255/255, 0, 0]
gray = [0.75, 0.75, 0.75]
@ -254,7 +284,7 @@ for i in range(len(dates_diff)):
label='3D survey')
plt.plot(xtide, ytide, '--', color=gray, linewidth=2.5, label='tide level contour')
plt.plot(sl_smooth[:,0], sl_smooth[:,1], '-', color=red, linewidth=2.5, label='SDS')
# plt.plot(sl[i][idx_beach,0], sl[i][idx_beach,1], 'go', markersize=3)
# plt.plot(sl[i][idx_beach,0], sl[i][idx_beach,1], 'w-', linewidth=2)
plt.xlabel('Eastings [m]')
plt.ylabel('Northings [m]')
plt.title('Shoreline comparison')
@ -276,15 +306,35 @@ for i in range(len(dates_diff)):
print(i)
# Calculate some error statistics
#%% Calculate some error statistics
zav = np.array(zav)
ztide = np.array(ztide)
plt.figure()
plt.plot(zav - ztide)
f = plt.figure()
plt.subplot(3,1,1)
plt.bar(np.linspace(1,len(zav),len(zav)), zav-ztide)
plt.ylabel('Error in z [m]')
plt.title('Elevation error')
plt.xticks([])
plt.draw()
plt.subplot(3,1,2)
plt.bar(np.linspace(1,len(zav),len(zav)), wave_hsig, color=orange)
plt.ylabel('Hsig [m]')
plt.xticks([])
plt.draw()
plt.subplot(3,1,3)
plt.bar(np.linspace(1,len(zav),len(zav)), np.array(cloud_cover)*100, color='g')
plt.ylabel('Cloud cover %')
plt.xlabel('comparison #')
plt.grid(False)
plt.grid(axis='y')
f.subplots_adjust(hspace=0)
plt.draw()
zav - ztide
np.sqrt(np.mean((zav - ztide)**2))
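The two bare expressions above print the per-survey errors and the RMSE; spelled out with the usual companion statistics:

err = zav - ztide
print('bias %.3f m, std %.3f m, rmse %.3f m'
      % (np.mean(err), np.std(err), np.sqrt(np.mean(err**2))))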
#%% plot to show LOWESS smoothing
#i = 0
#idx_beach = [np.min(np.linalg.norm(sl[i][k,:] - narrabeach, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
