forked from kilianv/CoastSat_WRL
updated most functions and workflow
parent 7ca64f2fa0
commit 0e78aace0c
@ -1,3 +1,6 @@
*.pyc
*.mat
*.tif
*.png
*.mp4
*.gif
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,106 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Draw reference points on satellite image
#==========================================================#

# Preamble
import os
import ee
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import pandas as pd
from datetime import datetime
import pickle
import pdb
import pytz
from pylab import ginput

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure

# my functions
import functions.utils as utils
import functions.sds as sds

np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
ee.Initialize()

# collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')

# location (Narrabeen-Collaroy beach)
#polygon = [[[151.301454, -33.700754],
#            [151.311453, -33.702075],
#            [151.307237, -33.739761],
#            [151.294220, -33.736329],
#            [151.301454, -33.700754]]];
# location (Oldbar shoreline)
#polygon = [[[152.664508, -31.896163],
#            [152.665827, -31.897112],
#            [152.631516, -31.924846],
#            [152.629285, -31.923362],
#            [152.664508, -31.896163]]];
# location (Oldbar inlet)
polygon = [[[152.676283, -31.866784],
            [152.709174, -31.869993],
            [152.678229, -31.892082],
            [152.670366, -31.886360],
            [152.676283, -31.866784]]];

# dates
start_date = '2017-01-30'
end_date = '2017-02-02'
#start_date = '2017-01-30'
#end_date = '2018-02-02'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon)).filterDate(start_date, end_date)

n_img = flt_col.size().getInfo()
print('Number of images covering the area:', n_img)
im_all = flt_col.getInfo().get('features')

satname = 'L8'
#sitename = 'NARRA'
sitename = 'OLDBAR_inlet'

# parameters
plot_bool = False # if you want the plots
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
cloud_threshold = 0.8

# find image in ee database
im = ee.Image(im_all[0].get('id'))
# load image as np.array
im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, polygon, satname, plot_bool)

# rescale intensities
im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)

# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)

plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
plt.show()

pts = ginput(n=50, timeout=1000, show_clicks=True)
points = np.array(pts)
plt.plot(points[:,0], points[:,1], 'ko')
plt.show()

pts_coords = sds.convert_pix2world(points[:,[1,0]], crs['crs_15m'])
pts = sds.convert_epsg(pts_coords, crs['epsg_code'], output_epsg)

filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_refpoints2.pkl'), 'wb') as f:
    pickle.dump(pts, f)
@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 5 16:19:31 2018

@author: z5030440
"""

import numpy as np
import matplotlib.pyplot as plt
import scipy.io as sio

# sl_gt (ground-truth) and sl_sds (satellite-derived) shorelines are assumed to
# already exist in the workspace from the comparison workflow
d_gt = {'arr':sl_gt}
d_sds = {'arr':sl_sds}

sio.savemat('sl_gt.mat', mdict=d_gt)
sio.savemat('sl_sds.mat', mdict=d_sds)

#%%

herror = sio.loadmat('hor_error.mat')

diff_p = (herror['gt_av'] - herror['sds_av'])[0,:]

n = len(herror['p_rmse'][0]) # number of shoreline comparisons (original used len(zav), undefined here)

f = plt.figure()
plt.subplot(3,1,1)
plt.bar(np.linspace(1,n,n), herror['p_rmse'][0])
plt.ylabel('rmse [m]')
plt.xticks([])
plt.title('Horizontal cross-shore error')

plt.subplot(3,1,2)
plt.bar(np.linspace(1,n,n), herror['p_mean'][0], color='orange')
plt.ylabel('mean [m]')
plt.xticks([])

plt.subplot(3,1,3)
plt.bar(np.linspace(1,n,n), herror['p_std'][0], color='g')
plt.ylabel('std [m]')
plt.xlabel('comparison #')
plt.grid(False)
plt.grid(axis='y')
f.subplots_adjust(hspace=0.2)
plt.draw()
@ -0,0 +1,65 @@
% Compare satellite-derived shorelines (SDS) with ground-truth contours in
% log-spiral (s,p) coordinates and compute horizontal error statistics
close all
clear
clc

addpath(genpath('C:\Users\z5030440\Documents\GitHub\geetools\functions\xyz2spz'))

sl_gt = load('sl_gt.mat')
sl_sds = load('sl_sds.mat')

sl_sds = sl_sds.arr
sl_gt = sl_gt.arr
for i = 1:length(sl_sds)
    sds.x = sl_sds{i}(:,1)
    sds.y = sl_sds{i}(:,2)
    sds.z = zeros(length(sl_sds{i}(:,1)),1)

    gt.x = sl_gt{i}(:,1)
    gt.y = sl_gt{i}(:,2)
    gt.z = zeros(length(sl_gt{i}(:,1)),1)

    outsds = xyz2spz(sds,'NARRA')
    outgt = xyz2spz(gt,'NARRA')

    figure
    hold on
    grid on
    box on
    plot(outsds.s, outsds.p, 'b-', 'linewidth',2)
    plot(outgt.s, outgt.p, 'r-', 'linewidth',2)
    xlabel('s [m]')
    ylabel('p [m]')
    title('Horizontal comparison in spz coordinates')
    legend({'SDS', 'contour at tide level'})

    % interpolate both shorelines onto a common alongshore grid (10 m spacing)
    xq = nanmin(outsds.s):10:nanmax(outsds.s)
    [gt_s idx_gt] = unique(outgt.s)
    gt_p = outgt.p(idx_gt)
    gt_p_int = interp1(gt_s, gt_p, xq)
    [sds_s idx_sds] = unique(outsds.s)
    sds_p = outsds.p(idx_sds)
    sds_p_int = interp1(sds_s, sds_p, xq)
    diff_p = sds_p_int - gt_p_int;

    sds_av(i) = median(sds_p_int(~(sds_p_int > median(sds_p_int) + 2*std(sds_p_int) | sds_p_int < median(sds_p_int) - 2*std(sds_p_int))))
    gt_p_int(isnan(gt_p_int)) = []
    gt_av(i) = median(gt_p_int(~(gt_p_int > median(gt_p_int) + 2*std(gt_p_int) | gt_p_int < median(gt_p_int) - 2*std(gt_p_int))))


    % remove NaNs and outliers (beyond 2*std of the median) from the cross-shore differences
    idx_nan = isnan(diff_p)
    diff_p(idx_nan) = []
    xq(idx_nan) = []
    idx_outlier = diff_p > median(diff_p) + 2*std(diff_p) | diff_p < median(diff_p) - 2*std(diff_p)
    diff_p(idx_outlier) = []
    xq(idx_outlier) = []

    p_rmse(i) = sqrt(mean(diff_p.^2))
    p_mean(i) = mean(diff_p)
    p_std(i) = std(diff_p)
    p_q90(i) = quantile(abs(diff_p), 0.9)

end

clearvars -except sds_av gt_av p_rmse p_mean p_std p_q90

save 'hor_error.mat'
@ -0,0 +1,213 @@
function [res,fval,it] = muller (f,Z0,itmax,ztol,ftol,option)
% MULLER find a zero of a real or complex function Y = F(Z)
%
% Syntax:
%
% RES = MULLER (F,Z0) find the zero of a complex or real function
% 'F' (either an anonymous function or .m function) using three initial
% guesses contained in the vector Z0. Muller takes the function F and
% evaluates it at each initial point using feval. F doesn't need to be
% vectorized.
% The initial guesses can be real or complex numbers close to the zero,
% bracketing the zero is not necessary. Parameters ITMAX, ZTOL and
% FTOL are set by default to 1000, 1e-5 and 1e-5, respectively.
%
% RES = MULLER (F,Z0,ITMAX) the maximum number of iterations is set
% equal to ITMAX. ZTOL and FTOL are set by default with the values mentioned
% above.
%
% RES = MULLER (F,Z0,ITMAX,ZTOL) ZTOL is used as a stopping
% criterion. If the absolute difference between the values of Z found in
% the two latest iterations is less than ZTOL, the program is stopped. FTOL
% is set by default with the value mentioned above.
%
% RES = MULLER (F,Z0,ITMAX,ZTOL,FTOL) FTOL is used as a stopping
% criterion. If the value of the function F at the Z found in the last
% iteration is less than FTOL, the program is stopped.
%
% RES = MULLER (F,Z0,ITMAX,ZTOL,FTOL,'both') indicates that both
% criteria ZTOL and FTOL must be satisfied simultaneously. By default,
% MULLER stops if one of the two criteria is fulfilled.
%
% [RES,FVAL] = MULLER (F,Z0,...) return the value of the function
% F at the Z found in the last iteration.
%
% [RES,FVAL,IT] = MULLER (F,Z0,...) return the number of iterations
% used to find the zero.
%
% Example 1:
% myf = @(x) (x-1)^3;
%
% muller(myf,[0 0.1 0.2],[],[],[],'both')
% ans =
% 1.0000 + 0.0000i
%
% Example 2:
%
% [res,fval,it] = muller('cosh',[0 0.1 0.2],[],[],[],'both')
%
% res =
% 0.0000 + 1.5708i
%
% fval =
% 5.5845e-012 + 3.0132e-012i
%
% it =
% 5
%
% Method taken from:
% Numerical Recipes: The art of scientific computing
% W.H. Press; B.P. Flannery; S.A. Teukolsky; W.T. Vetterling
% 1986
%
% Thanks to John D'Errico for his helpful review.
%
% Written by Daniel H. Cortes
% MAE Department, West Virginia University
% March, 2008.
%


%=================================================
% Checking proper values of the input parameters
%=================================================


if nargin > 6
    error ('Too many arguments.')
elseif nargin < 2
    error('Too few arguments.')
end

if nargin < 6
    opt = 1;
elseif ischar(option) == 1
    if size(option,2) == 4
        if sum(option == 'both') == 4
            opt = 2;
        else
            error ('Option parameter must be *both*.')
        end
    else
        error ('Option parameter must be *both*.')
    end
else
    error ('Option parameter must be a character array (string).')
end


if nargin < 5
    ftol = 1e-5;
elseif isnumeric(ftol) ~= 1
    error ('FTOL must be a numeric argument.')
elseif isempty(ftol) == 1
    ftol = 1e-5;
elseif size(ftol,1) ~= 1 || size(ftol,2) ~= 1
    error ('FTOL cannot be an array')
end


if nargin < 4
    ztol = 1e-5;
elseif isnumeric(ztol) ~= 1
    error ('ZTOL must be a numeric argument.')
elseif isempty(ztol) == 1
    ztol = 1e-5;
elseif size(ztol,1) ~= 1 || size(ztol,2) ~= 1
    error ('ZTOL cannot be an array.')
end


if nargin < 3
    itmax = 1000;
elseif isnumeric(itmax) ~= 1
    error ('ITMAX must be a numeric argument.')
elseif isempty(itmax) == 1
    itmax = 1000;
elseif size(itmax,1) ~= 1 || size(itmax,2) ~= 1
    error ('ITMAX cannot be an array.')
end


if isnumeric(Z0) ~= 1
    error ('Z0 must be a vector of three numeric arguments.')
elseif isempty(Z0) == 1 || length(Z0) ~= 3 || min(size(Z0)) ~= 1
    error ('Z0 must be a vector of length 3 of either complex or real arguments.')
end

if Z0(1)==Z0(2) || Z0(1)==Z0(3) || Z0(2)==Z0(3)
    error('The initial guesses must be different')
end

%=============================
% Beginning of Muller's method
%=============================

z0 = Z0(1);
z1 = Z0(2);
z2 = Z0(3);

y0 = feval ( f, z0);
y1 = feval ( f, z1);
y2 = feval ( f, z2);

for it = 1:itmax
    q = (z2 - z1)/(z1 - z0);
    A = q*y2 - q*(1+q)*y1 + q^2*y0;
    B = (2*q + 1)*y2 - (1 + q)^2*y1 + q^2*y0;
    C = (1 + q)*y2;

    if ( A ~= 0 )

        disc = B^2 - 4*A*C;

        den1 = ( B + sqrt ( disc ) );
        den2 = ( B - sqrt ( disc ) );

        if ( abs ( den1 ) < abs ( den2 ) )
            z3 = z2 - (z2 - z1)*(2*C/den2);
        else
            z3 = z2 - (z2 - z1)*(2*C/den1);
        end

    elseif ( B ~= 0 )
        z3 = z2 - (z2 - z1)*(2*C/B);
    else
        warning('Muller Method failed to find a root. Last iteration result used as an output. Result may not be accurate')
        res = z2;
        fval = y2;
        return
    end

    y3 = feval ( f, z3);

    if opt == 1
        if ( abs (z3 - z2) < ztol || abs ( y3 ) < ftol )
            res = z3;
            fval = y3;
            return
        end
    else
        if ( abs (z3 - z2) < ztol && abs ( y3 ) < ftol )
            res = z3;
            fval = y3;
            return
        end
    end

    z0 = z1;
    z1 = z2;
    z2 = z3;

    y0 = y1;
    y1 = y2;
    y2 = y3;

end

res = z2;
fval = y2;
%warning('Maximum number of iterations reached. Result may not be accurate')
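For orientation, a minimal sketch (not part of the commit) of how muller is typically called: pass an anonymous function and three distinct initial guesses, then read back the root, the residual and the iteration count. This mirrors Example 1 in the help text above and the call made by the xyz2spz function later in this commit.

% minimal sketch: root of f(x) = (x-1)^3, starting from three guesses near 0
myf = @(x) (x-1)^3;
[res, fval, it] = muller(myf, [0 0.1 0.2], [], [], [], 'both');
% xyz2spz uses the same pattern to solve its Equation 6 implicitly, e.g.
% theta_s = muller(@(x) x - (1/A)*log(kappa*sin(delta + x)), [theta-pi/8 theta theta+pi/8]);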
@ -0,0 +1,87 @@
function out = sort_back( data, ind, dim )
% SORT_BACK sort back data to original order
% ind is the indexes obtained from sorting
% dim is the sorted dimension of the data (assumed to be 1 if not specified)
% Ex:
% y = randn(3,4,2);
% [y,ind] = sort(y,2);
% do stuff with sorted y...
% y2 = sort_back( y, ind, 2 );
%
% Works on arrays of any dimension
% Also works on cellstrings (vectors)
%
% C = {'hello' 'yes' 'no' 'goodbye'};
% [C,ind] = sort(C);
% C2 = sort_back(C,ind);
%
% See also SORT

%Author Ivar Eskerud Smith

if size(ind)~=size(data)
    error('Different size of indexes and input data');
end

if iscell(data)
    if ~any(size(data)==1)
        error('Only vectors are supported in cell sorting/back-sorting');
    end
    out=cell(size(data));
    out(ind) = data;
    return;
end

if ~isnumeric(data) || ~isnumeric(ind)
    error('Inputs have to be numeric or cell');
end

n=ndims(ind);
if ~exist('dim','var')
    dim=1;
end
if dim>n
    error('Specified sorted dimension must be within array bounds');
end

%shift array so that the sorted dim is the first dimension
if dim~=1
    sortInd=1:1:n;sortInd(1)=dim;sortInd(dim)=1;
    data = permute(data,sortInd);
    ind = permute(ind,sortInd);
end
inds = repmat({1},1,n);inds{1}=':';
if ~issorted( data(inds{:}) )
    warning('The input data is not sorted along the specified dimension');
end

s = size(ind);
nData = numel(data);
inds = repmat({1},1,n);
inds(1:2)={':',':'};
shiftSize = s(1)*s(2);
out=nan(size(data));

%loop all 2d arrays within nd-array
for k=1:prod(s(3:end))
    tmpdata = data(inds{:});
    tmpind = ind(inds{:});

    %data is shifted so that the sorted dim = 1
    for i=1:numel(tmpdata(1,:))
        out(tmpind(:,i),i) = tmpdata(:,i);
    end

    if n>2
        %shift to next 2d array within nd-array
        shiftInds = mod((1:nData)-shiftSize-1,nData)+1;
        out=reshape(out(shiftInds),s);
        data=reshape(data(shiftInds),s);
        ind=reshape(ind(shiftInds),s);
    end
end

%permute back to original order
sortInd=1:1:ndims(out);sortInd(1)=dim;sortInd(dim)=1;
out = permute(out,sortInd);
@ -0,0 +1,117 @@
function out = xyz2spz(xyz_data,site)

%function out = xyz2spz(xyz_data,site)
%
%Function to transform (x,y,z) coordinates on an embayed beach to alongshore - cross-shore
%coordinates (s,p,z) using the log spiral, given by the equation
%r = r0*exp(A*theta). A = cot(alpha).
%
%xyz_data is a structure containing:
%
%xyz_data.x
%xyz_data.y
%xyz_data.z
%
%site is the name of the structure generated from the MALT graphical user interface
%
%Refer to paper
%
%Harley, M.D. and Turner, I.L. (2007) A simple data transformation technique
%for pre-processing survey data at embayed beaches, Coast. Eng.,
%doi:10.1016/j.coastaleng.2007.07.001, in press.
%
%Created by Mitch Harley
%8th August, 2005
%Last Modified 4th April, 2012

%----------------------------------------------------------------
%LOAD LOGSPIRAL-FIT PARAMETERS

eval(['load ' site ';'])
eval(['site = ' site ';'])

%Define origin and A of log spiral
origin = site.origin;
alph = site.alpha;
A = cot(alph*pi/180);
r0_origin = site.r0_origin;

%-----------------------------------------------------------------

%DO TRANSFORMATION


%Points need to be sorted prior to analysis %MDH 4/4/2012
aa = [xyz_data.x xyz_data.y xyz_data.z];
[sorted_points,Isort] = sortrows(aa);

%Convert xyz coordinates to polar coordinates
r = sqrt((sorted_points(:,1) - origin(1)).^2+(sorted_points(:,2) - origin(2)).^2);
theta = unwrap(atan2((sorted_points(:,2)-origin(2)),(sorted_points(:,1)-origin(1))) );


%Find constants delta and kappa
delta = pi/2+acot(A)-theta; %From Equation 5
kappa = r./(r0_origin*sin(pi/2-acot(A))); %From Equation 6


%Find theta_s by solving implicitly, using the muller function
for i = 1:length(theta);
    %Use muller function in case any complex solutions
    theta_s(i,1) = muller(@(x) (x-(1/A)*log(kappa(i)*sin(delta(i)+x))),[theta(i)-pi/8 theta(i) theta(i)+pi/8]);%From Equation 6
end

%plot(theta_s*180/pi)

%Find r_s
r_s = r0_origin*exp(A*theta_s);%From Equation 1

%Find s
lamda = r0_origin*sec(acot(A));%From Equation 8
start_point = 0; %Can be changed to make a more suitable start point
s = lamda*(exp(A*theta_s)-exp(A*start_point));%From Equation 8

%Find p
p = r.*sin(theta-theta_s)./sin(pi/2-acot(A)); %From Equation 9

%Convert any complex numbers to real numbers
p = real(p);
s = real(s);

%Sort back points to get the right indices %MDH 4/4/2012
p = sort_back(p,Isort);
s = sort_back(s,Isort);

%-----------------------------------------------------------------
%POST-PROCESS DATA
%s data

if site.reverse_s ==0
    s = s - site.startpoint;%Make minimum s == 0
elseif site.reverse_s ==1
    s = -(s - site.startpoint);
end

%p data
if site.subtract_res ==1 %Add switch for user to subtract residuals or not - MDH 19/5/2010
    [MIN,L] = min(site.boundary.s);
    I = find(s<=MIN);
    p(I) = p(I) - site.boundary.p(L);
    [MAX,L] = max(site.boundary.s);
    I = find(s>=MAX);
    p(I) = p(I) - site.boundary.p(L);
    I = find(s>MIN&s<MAX);
    p(I) = p(I) - interp1(site.boundary.s,site.boundary.p,s(I));%Subtract logspiral errors from p data
end


if site.alpha<0
    p = -p;
end

%-----------------------------------------------------------------

out.s = s;
out.p = p;
out.z = xyz_data.z;
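A minimal usage sketch for xyz2spz (not part of the commit). It assumes a NARRA site definition file is on the MATLAB path, with the fields the function reads (origin, alpha, r0_origin, startpoint, reverse_s, subtract_res, boundary), as in the comparison script earlier in this commit that calls xyz2spz(sds,'NARRA'). The coordinate values are hypothetical.

% build the xyz_data struct from eastings/northings [m] (hypothetical values);
% elevations are set to zero, as in the comparison script
xyz_data.x = [342000; 342050; 342100];
xyz_data.y = [6266000; 6266040; 6266080];
xyz_data.z = zeros(3,1);
out = xyz2spz(xyz_data, 'NARRA');
plot(out.s, out.p)   % alongshore (s) vs cross-shore (p) position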
@ -0,0 +1,166 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Make a gif of the satellite images
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import matplotlib.animation as manimation
import ee
import pdb

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
import imageio

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.5 # threshold for cloud cover
plot_bool = False # if you want the plots
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA'
sitename = 'OLDBAR_inlet'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps) # sort timestamps since images are sorted in directory
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
    input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints2' + '.pkl'), 'rb') as f:
    refpoints = pickle.load(f)

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
cloud_cover_ts = []
date_acquired_ts = []
idx_skipped = []
idx_nocloud = []

t = []
shorelines = []

with open(os.path.join(filepath, sitename + '_idxnocloud' + '.pkl'), 'rb') as f:
    idx_nocloud = pickle.load(f)

for i in idx_nocloud:
    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)
    # cloud mask
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms,(im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')
    # check if -inf or nan values and add to cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skipped cloud ' + str(i))
        idx_skipped.append(i)
        continue
    # idx_nocloud.append(i)
    # check if image for that date is already present and keep the one with less clouds
    if file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] in date_acquired_ts:
        idx_samedate = utils.find_indices(date_acquired_ts, lambda e : e == file_names_pan[i][9:19])
        idx_samedate = idx_samedate[0]
        print(str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
        if cloud_cover >= cloud_cover_ts[idx_samedate]:
            print('skipped double ' + str(i))
            idx_skipped.append(i)
            continue
        else:
            del shorelines[idx_samedate]
            del t[idx_samedate]
            del cloud_cover_ts[idx_samedate]
            del date_acquired_ts[idx_samedate]
            print('deleted ' + str(idx_samedate))

    # rescale intensities
    im_ms = sds.rescale_image_intensity(im_ms, cloud_mask, prob_high, plot_bool)
    im_pan = sds.rescale_image_intensity(im_pan, cloud_mask, prob_high, plot_bool)
    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
    # calculate NDWI
    im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
    # detect edges
    wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)
    # convert from pixels to world coordinates
    wl_coords = sds.convert_pix2world(wl_pix, georef)
    # convert to output epsg spatial reference
    wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)

    # save images as png for video
    fig = plt.figure()
    plt.grid(False)
    plt.imshow(im_ms_ps[:,:,[2,1,0]], animated=True)
    mng = plt.get_current_fig_manager()
    # showMaximized() only exists for GUI backends; skip it with the Agg backend set above
    if hasattr(mng, 'window') and hasattr(mng.window, 'showMaximized'):
        mng.window.showMaximized()
    plt.title(file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10])
    plt.xticks([])
    plt.yticks([])
    plt.axis('equal')
    plt.tight_layout()
    plt.draw()
    plt.savefig(os.path.join(filepath,
                'plots', file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] + '.png'),
                dpi = 300)
    plt.close()

# create gif
images = []
filenames = os.listdir(os.path.join(filepath, 'plots'))
with imageio.get_writer('movie.gif', mode='I', duration=0.2) as writer:
    for filename in filenames:
        image = imageio.imread(os.path.join(filepath,'plots',filename))
        writer.append_data(image)
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-

import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee

import matplotlib.dates as mdates
import matplotlib.cm as cm
from datetime import datetime, timedelta
import pickle
import pytz
import scipy.io as sio
import scipy.interpolate as interpolate
import statsmodels.api as sm
import skimage.measure as measure


# my functions
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100

au_tz = pytz.timezone('Australia/Sydney')

# load timestamps from satellite images
satname = 'L8'
sitename = 'OLDBAR'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'rb') as f:
    output = pickle.load(f)
dates_l8 = output['t']
# convert to AEST
dates_l8 = [_.astimezone(au_tz) for _ in dates_l8]
# get the satellite shorelines
sl = output['shorelines']

# load oldbar beach points (manually digitized)
with open(os.path.join(os.getcwd(), 'olddata', 'oldbar_beach' + '.pkl'), 'rb') as f:
    narrabeach = pickle.load(f)


dist_thresh = 250
frac_smooth = 1./12
plt.figure()
plt.axis('equal')
for i in range(1):
    # select points of sds that are close to the manually digitized points
    idx_beach = [np.min(np.linalg.norm(sl[i][k,:] - narrabeach, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
    plt.plot(sl[i][:,0], sl[i][:,1])
    plt.plot(sl[i][idx_beach,0], sl[i][idx_beach,1])


    # smooth (LOWESS) satellite shoreline
    sl_smooth = sm.nonparametric.lowess(sl[i][idx_beach,0],sl[i][idx_beach,1], frac=frac_smooth, it = 10)
    sl_smooth = sl_smooth[:,[1,0]]

    plt.plot(sl_smooth[:,0], sl_smooth[:,1])


plt.draw()
Binary file not shown.
@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Process shorelines (clipping and smoothing)
#==========================================================#

# Initial settings

import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee

import matplotlib.dates as mdates
import matplotlib.cm as cm
import matplotlib.colors as mcolor
from datetime import datetime, timedelta
import pickle
import pytz
import scipy.io as sio
import scipy.interpolate as interpolate
import statsmodels.api as sm
import skimage.measure as measure
import simplekml

# my functions
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100

au_tz = pytz.timezone('Australia/Sydney')
au_epsg = 28356
# load the satellite-derived shorelines
satname = 'L8'
sitename = 'OLDBAR'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'rb') as f:
    output = pickle.load(f)

sl = output['shorelines']
dates_sl = output['t']
# convert to AEST
dates_sl = [_.astimezone(au_tz) for _ in dates_sl]

# load the reference shoreline points
with open(os.path.join(os.getcwd(), 'data', satname, sitename, sitename + '_refpoints.pkl'), 'rb') as f:
    refpoints = pickle.load(f)

dist_thresh = 200
frac_smooth = 1./15
plt.figure()
plt.axis('equal')
cmap = cm.get_cmap('brg')
colours = cmap(np.linspace(0, 1, num=len(sl)))
kml = simplekml.Kml()
for i in range(len(sl)):
    # select points of SDS that are close to the manually digitized points
    idx_ref = [np.min(np.linalg.norm(sl[i][k,:] - refpoints, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
    # smooth (LOWESS) satellite shoreline
    sl_smooth = sm.nonparametric.lowess(sl[i][idx_ref,0],sl[i][idx_ref,1], frac=frac_smooth, it = 10)
    sl_smooth = sl_smooth[:,[1,0]]
    # sl_smooth = sl[i][idx_ref,:]

    # plt.plot(sl[i][idx_ref,0],sl[i][idx_ref,1], 'k-')
    plt.plot(sl_smooth[:,0], sl_smooth[:,1], color=colours[i,:], label=dates_sl[i].strftime('%d-%b-%Y'))

    # convert to wgs84 (epsg = 4326)
    sl_wgs84 = sds.convert_epsg(sl_smooth, 28356, 4326)
    # save in kml file
    ln = kml.newlinestring(name=dates_sl[i].strftime('%d-%b-%Y'))
    ln.coords = sl_wgs84
    ln.style.labelstyle.color = mcolor.rgb2hex(colours[i,:3])
    ln.style.linestyle.color = mcolor.rgb2hex(colours[i,:3])

plt.legend(ncol=3)
plt.xlabel('Eastings [m]')
plt.ylabel('Northings [m]')
plt.title('Oldbar inlet (South)')
plt.draw()

kml.save(satname + sitename + '_shorelines.kml')