shoreline comparison

master
kvos 7 years ago
parent 67de70cbad
commit ec290ab323

@ -0,0 +1,222 @@
# -*- coding: utf-8 -*-
# Preamble
import ee
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import pandas as pd
from datetime import datetime
import pickle
import pdb
import pytz
from pylab import ginput
import scipy.io as sio
import scipy.interpolate
import os
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure
# my functions
import functions.utils as utils
import functions.sds as sds
np.seterr(all='ignore') # ignore divide-by-zero and nan warnings
ee.Initialize()
au_tz = pytz.timezone('Australia/Sydney')
#%%
# load SDS shorelines
with open(os.path.join('data', 'data_gt_l8.pkl'), 'rb') as f:
    data = pickle.load(f)
# load quadbike dates and convert from datenum to datetime
suffix = '.mat'
dir_name = os.getcwd()
file_name = os.path.join('data', 'quadbike_dates')
file_path = os.path.join(dir_name, file_name + suffix)
quad_dates = sio.loadmat(file_path)['dates']
dt_quad = []
for i in range(quad_dates.shape[0]):
    # use au_tz.localize() rather than tzinfo=au_tz: passing a pytz timezone to
    # the datetime constructor silently applies the wrong (LMT) offset
    dt_quad.append(au_tz.localize(datetime(quad_dates[i,0], quad_dates[i,1], quad_dates[i,2])))
# remove overlapping images, keeping the one with the lowest cloud cover
n = len(data['cloud_cover'])
idx_worst = []
for i in range(n):
    date_im = data['date_acquired'][i]
    idx_double = np.isin(data['date_acquired'], date_im)
    if sum(idx_double.astype(int)) > 1:
        idx_worst.append(np.where(idx_double)[0][np.argmax(np.array(data['cloud_cover'])[idx_double])])
dt_sat = []
new_meta = {'contours':[],
            'cloud_cover':[],
            'geom_rmse_model':[],
            'gcp_model':[],
            'quality':[],
            'sun_azimuth':[],
            'sun_elevation':[]}
for i in range(n):
    if not np.isin(i, idx_worst):
        dt_sat.append(data['dt'][i].astimezone(au_tz))
        new_meta['contours'].append(data['contours'][i])
        new_meta['cloud_cover'].append(data['cloud_cover'][i])
        new_meta['geom_rmse_model'].append(data['geom_rmse_model'][i])
        new_meta['gcp_model'].append(data['gcp_model'][i])
        new_meta['quality'].append(data['quality'][i])
        new_meta['sun_azimuth'].append(data['sun_azimuth'][i])
        new_meta['sun_elevation'].append(data['sun_elevation'][i])
# calculate the difference in days between each satellite date and each survey date
diff_days = [[(x - _).days for _ in dt_quad] for x in dt_sat]
day_thresh = 15
idx_close = [utils.find_indices(_, lambda e: abs(e) < day_thresh) for _ in diff_days]
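# A small worked illustration of the matching above (editor's example with
# made-up numbers): for one satellite date whose day differences to the
# surveys are diffs = [-20, -3, 8, 40],
# utils.find_indices(diffs, lambda e: abs(e) < day_thresh) returns [1, 2],
# i.e. the two surveys within the 15-day threshold.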
# put everything in a dictionary and save it (in data/ so the next cell can find it)
wl_comp = []
for i in range(len(dt_sat)):
    wl_comp.append({'sat dt': dt_sat[i],
                    'quad dt': [dt_quad[_] for _ in idx_close[i]],
                    'days diff': [diff_days[i][_] for _ in idx_close[i]],
                    'contours': new_meta['contours'][i],
                    'cloud_cover': new_meta['cloud_cover'][i],
                    'geom_rmse_model': new_meta['geom_rmse_model'][i],
                    'gcp_model': new_meta['gcp_model'][i],
                    'quality': new_meta['quality'][i],
                    'sun_azimuth': new_meta['sun_azimuth'][i],
                    'sun_elevation': new_meta['sun_elevation'][i]})
with open(os.path.join('data', 'wl_l8_comparison.pkl'), 'wb') as f:
    pickle.dump(wl_comp, f)
#%%
with open(os.path.join('data', 'wl_l8_comparison.pkl'), 'rb') as f:
    wl = pickle.load(f)
# list the quadbike survey files
suffix = '.mat'
dir_name = os.getcwd()
subfolder_name = os.path.join('data', 'quadbike_surveys')
file_path = os.path.join(dir_name, subfolder_name)
file_names = os.listdir(file_path)
for i in range(len(file_names)):
    fn_mat = os.path.join(file_path, file_names[i])
    # parse the survey date from the file name
    years = int(file_names[i][6:10])
    months = int(file_names[i][11:13])
    days = int(file_names[i][14:16])
    for j in range(len(wl)):
        if wl[j]['quad dt'][0] == au_tz.localize(datetime(years, months, days)):
            quad_mat = sio.loadmat(fn_mat)
            wl[j].update({'quad_data':{'x':quad_mat['x'],
                                       'y':quad_mat['y'],
                                       'z':quad_mat['z'],
                                       'dt': au_tz.localize(datetime(years, months, days))}})
with open(os.path.join('data', 'wl_final.pkl'), 'wb') as f:
    pickle.dump(wl, f)
#%%
with open(os.path.join('data', 'wl_final.pkl'), 'rb') as f:
    wl = pickle.load(f)
i = 0
x = wl[i]['quad_data']['x']
y = wl[i]['quad_data']['y']
z = wl[i]['quad_data']['z']
x = x.reshape(x.shape[0] * x.shape[1])
y = y.reshape(y.shape[0] * y.shape[1])
z = z.reshape(z.shape[0] * z.shape[1])
idx_nan = np.isnan(z)
x_nan = x[idx_nan]
y_nan = y[idx_nan]
z_nan = z[idx_nan]
x_nonan = x[~idx_nan]
y_nonan = y[~idx_nan]
z_nonan = z[~idx_nan]
xs = x_nonan[::10]
ys = y_nonan[::10]
zs = z_nonan[::10]
xq = wl[i]['contours'][:,0]
yq = wl[i]['contours'][:,1]
# clip the SDS contour to the extent of the survey data
idx_x = np.logical_and(xq < np.max(xs), xq > np.min(xs))
idx_y = np.logical_and(yq < np.max(ys), yq > np.min(ys))
idx_in = np.logical_and(idx_x, idx_y)
xq = xq[idx_in]
yq = yq[idx_in]
for i in range(len(xq)):
    # leftover from a local-window interpolation attempt; xint/yint are unused
    idx_x = np.logical_and(xs < xq[i] + 10, xs > xq[i] - 10)
    idx_y = np.logical_and(ys < yq[i] + 10, ys > yq[i] - 10)
    xint = xs[idx_x]
    yint = ys[idx_y]
# interp2d treats the 1-D xs, ys, zs arrays as scattered data points
f = scipy.interpolate.interp2d(xs, ys, zs, kind='linear')
zq = f(xq, yq)
plt.figure()
plt.grid()
plt.scatter(xs, ys, s=10, c=zs, marker='o', cmap=cm.get_cmap('jet'),
            label='quad data')
plt.plot(xq,yq,'r-o', markersize=5, label='SDS')
plt.axis('equal')
plt.legend()
plt.colorbar(label='mAHD')
plt.xlabel('Eastings [m]')
plt.ylabel('Northings [m]')
plt.show()
plt.figure()
plt.plot(zq[:,0])
plt.show()
plt.figure()
plt.grid()
plt.scatter(x_nonan, y_nonan, s=10, c=z_nonan, marker='o', cmap=cm.get_cmap('jet'),
            label='quad data')
#plt.plot(x_nan, y_nan, 'k.', label='nans')
plt.plot(xq,yq,'r-o', markersize=5, label='SDS')
plt.axis('equal')
plt.legend()
plt.colorbar(label='mAHD')
plt.xlabel('Eastings [m]')
plt.ylabel('Northings [m]')
plt.show()
# griddata expects tuples of coordinate arrays (and no nans in z)
z2 = scipy.interpolate.griddata((x_nonan, y_nonan), z_nonan, (xq, yq), method='linear')
f_interp = scipy.interpolate.interp2d(x_nonan, y_nonan, z_nonan, kind='linear')
sio.savemat('shoreline1.mat', {'x':xq, 'y':yq})
from scipy import interpolate
x = np.arange(-5.01, 5.01, 0.01)
y = np.arange(-5.01, 5.01, 0.01)
xx, yy = np.meshgrid(x, y)
z = np.sin(xx**2+yy**2)
f = interpolate.interp2d(x, y, z, kind='cubic')
xnew = np.arange(-5.01, 5.01, 1e-2)
ynew = np.arange(-5.01, 5.01, 1e-2)
znew = f(xnew, ynew)
plt.plot(x, z[:, 0], 'ro-', xnew, znew[:, 0], 'b-')
plt.show()
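# NOTE (editor): interp2d is deprecated (removed in SciPy 1.14). A minimal
# equivalent sketch of the gridded demo above with RegularGridInterpolator;
# this is an alternative API, not part of the original script:
from scipy.interpolate import RegularGridInterpolator
rgi = RegularGridInterpolator((y, x), z, method='cubic')  # z has shape (len(y), len(x))
xg, yg = np.meshgrid(xnew, ynew)
znew_rgi = rgi(np.column_stack((yg.ravel(), xg.ravel()))).reshape(xg.shape)
plt.plot(x, z[:, 0], 'ro-', xnew, znew_rgi[:, 0], 'b-')
plt.show()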

@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-
# Preamble
import ee
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import pandas as pd
from datetime import datetime
import pickle
import pdb
import pytz
from pylab import ginput
import scipy.io as sio
import scipy.interpolate
import os
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure
# my functions
import functions.utils as utils
import functions.sds as sds
np.seterr(all='ignore') # ignore divide-by-zero and nan warnings
ee.Initialize()
with open(os.path.join('data', 'wl_final.pkl'), 'rb') as f:
    wl = pickle.load(f)
i = 0
x = wl[i]['quad_data']['x']
y = wl[i]['quad_data']['y']
z = wl[i]['quad_data']['z']
x = x.reshape(x.shape[0] * x.shape[1])
y = y.reshape(y.shape[0] * y.shape[1])
z = z.reshape(z.shape[0] * z.shape[1])
idx_nan = np.isnan(z)
x = x[~idx_nan]
y = y[~idx_nan]
z = z[~idx_nan]

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

@ -52,3 +52,7 @@ def compare_images(im1, im2):
    ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
    plt.imshow(im2, cmap='gray')
    plt.show()
def find_indices(lst, condition):
    """Imitation of MATLAB's find function."""
    return [i for i, elem in enumerate(lst) if condition(elem)]
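# Example (editor's illustration):
#   >>> find_indices([4, -2, 7, 0], lambda e: e > 0)
#   [0, 2]
# For numpy arrays, np.where(condition)[0] gives the same result.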

@ -0,0 +1,96 @@
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 23 12:46:04 2018
@author: z5030440
"""
# Preamble
import ee
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import pandas as pd
from datetime import datetime
import pickle
import pdb
import pytz
from pylab import ginput
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure
# my functions
import functions.utils as utils
import functions.sds as sds
np.seterr(all='ignore') # ignore divide-by-zero and nan warnings
ee.Initialize()
#%% Select images
# parameters
plot_bool = False # set to True to show the plots
prob_high = 99.9 # upper percentile used to clip and rescale pixel intensities
min_contour_points = 100 # minimum number of points in each detected water line
output_epsg = 28356 # GDA94 / MGA Zone 56
cloud_threshold = 0.8
# select collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.3473129272461,-33.69035274454718],
               [151.2820816040039,-33.68206818063878],
               [151.27281188964844,-33.74775138989556],
               [151.3425064086914,-33.75231878701767],
               [151.3473129272461,-33.69035274454718]]]
#rect_narra = [[[151.301454, -33.700754],
#               [151.311453, -33.702075],
#               [151.307237, -33.739761],
#               [151.294220, -33.736329],
#               [151.301454, -33.700754]]]
# Dates
start_date = '2016-01-01'
end_date = '2016-12-31'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra)).filterDate(start_date, end_date)
n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')
# find each image in ee database
im = ee.Image(im_all[0].get('id'))
# load image as np.array
im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, rect_narra, plot_bool)
# rescale intensities
im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)
# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)
plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
plt.show()
# digitise points along the beach in the displayed image
pts = ginput(15)
points = np.array(pts)
plt.plot(points[:,0], points[:,1], 'ko')
plt.show()
# convert the clicked pixels to world coordinates, then to the output EPSG
pts_coords = sds.convert_pix2world(points[:,[1,0]], crs['crs_15m'])
pts = sds.convert_epsg(pts_coords, crs['epsg_code'], output_epsg)
with open('data/narra_beach.pkl', 'wb') as f:
    pickle.dump(pts, f)
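# NOTE (editor): the body of sds.convert_pix2world is not shown in this commit.
# For illustration, a GDAL-style 6-parameter affine mapping from (row, col)
# pixel indices to world coordinates looks like the sketch below; whether
# crs['crs_15m'] stores exactly this format is an assumption.
def pix2world_sketch(rows, cols, gt):
    # gt = (x_origin, pixel_width, row_rotation, y_origin, col_rotation, -pixel_height)
    x = gt[0] + cols*gt[1] + rows*gt[2]
    y = gt[3] + cols*gt[4] + rows*gt[5]
    return np.column_stack((x, y))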

@ -0,0 +1,119 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 1 14:32:08 2018
@author: z5030440
Main code to extract shorelines from Landsat imagery
"""
# Preamble
import ee
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from datetime import datetime
import pytz
import pdb
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure
# my functions
import functions.utils as utils
import functions.sds as sds
np.seterr(all='ignore') # ignore divide-by-zero and nan warnings
ee.Initialize()
# parameters
plot_bool = False # set to True to show the plots
prob_high = 99.9 # upper percentile used to clip and rescale pixel intensities
min_contour_points = 100 # minimum number of points in each detected water line
# select collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA')
# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.3473129272461,-33.69035274454718],
               [151.2820816040039,-33.68206818063878],
               [151.27281188964844,-33.74775138989556],
               [151.3425064086914,-33.75231878701767],
               [151.3473129272461,-33.69035274454718]]]
# Dates
start_date = '2016-01-01'
end_date = '2016-12-31'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))#.filterDate(start_date, end_date)
n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')
props = {'cloud_cover_cropped':[],
         'cloud_cover':[],
         'cloud_cover_land':[],
         'date_acquired':[],
         'geom_rmse_model':[],
         'geom_rmse_verify':[],
         'gcp_model':[],
         'gcp_verify':[],
         'quality':[],
         'sun_azimuth':[],
         'sun_elevation':[]}
t = []
# loop through all images
for i in range(n_img):
    # find each image in the ee database
    im = ee.Image(im_all[i].get('id'))
    im_bands = im_all[i].get('bands')
    im_props = im_all[i]['properties']
    # compute cloud cover on the cropped image
    for j in range(len(im_bands)): del im_bands[j]['dimensions']
    qa_band = [im_bands[11]]
    im_qa, crs_qa = sds.load_image(im, rect_narra, qa_band)
    im_qa = im_qa[:,:,0]
    im_cloud = sds.create_cloud_mask(im_qa)
    props['cloud_cover_cropped'].append(100*np.sum(im_cloud)/im_cloud.size)
    # extract image metadata
    props['cloud_cover'].append(im_props['CLOUD_COVER'])
    props['cloud_cover_land'].append(im_props['CLOUD_COVER_LAND'])
    props['date_acquired'].append(im_props['DATE_ACQUIRED'])
    props['geom_rmse_model'].append(im_props['GEOMETRIC_RMSE_MODEL'])
    props['gcp_model'].append(im_props['GROUND_CONTROL_POINTS_MODEL'])
    props['quality'].append(im_props['IMAGE_QUALITY_OLI'])
    props['sun_azimuth'].append(im_props['SUN_AZIMUTH'])
    props['sun_elevation'].append(im_props['SUN_ELEVATION'])
    # the geometry cannot always be verified, so these keys may be missing
    try:
        props['geom_rmse_verify'].append(im_props['GEOMETRIC_RMSE_VERIFY'])
        props['gcp_verify'].append(im_props['GROUND_CONTROL_POINTS_VERIFY'])
    except KeyError:
        props['geom_rmse_verify'].append(np.nan)
        props['gcp_verify'].append(np.nan)
    # record the exact time of acquisition
    t.append(im_props['system:time_start'])
#%% create a pd.DataFrame with a datetime index
dt = []
fmt = '%Y-%m-%d %H:%M:%S %Z%z'
au_tz = pytz.timezone('Australia/Sydney')
for k in range(len(t)): dt.append(datetime.fromtimestamp(t[k]/1000, tz=au_tz))
df = pd.DataFrame(data=props, index=dt, columns=list(props.keys()))
df.to_pickle('meta_l8.pkl')
#df['cloud_cover_cropped'].groupby(df.index.month).count().plot.bar()
#df_monthly = df['cloud_cover_cropped'].groupby(df.index.month)
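# NOTE (editor): sds.create_cloud_mask is defined elsewhere in the repo. A
# minimal sketch of the usual approach for Landsat 8 Collection 1, assuming the
# cloud flag sits in bit 4 of the BQA band (verify against the actual code):
def create_cloud_mask_sketch(im_qa):
    cloud_bit = 1 << 4  # BQA bit 4 flags clouds in L8 Collection 1
    return (im_qa.astype(np.int64) & cloud_bit) > 0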

@ -52,11 +52,16 @@ rect_narra = [[[151.3473129272461,-33.69035274454718],
               [151.27281188964844,-33.74775138989556],
               [151.3425064086914,-33.75231878701767],
               [151.3473129272461,-33.69035274454718]]]
with open('data/narra_beach.pkl', 'rb') as f:
    pts_beach = pickle.load(f)
#rect_narra = [[[151.301454, -33.700754],
#               [151.311453, -33.702075],
#               [151.307237, -33.739761],
#               [151.294220, -33.736329],
#               [151.301454, -33.700754]]]
# Dates
start_date = '2016-01-01'
end_date = '2016-12-31'
@ -80,40 +85,86 @@ skipped_images = np.zeros((n_img,1)).astype(bool)
output_wl = []
# loop through all images
for i in range(n_img):
    # find each image in the ee database
    im = ee.Image(im_all[i].get('id'))
    # load the image as a np.array
    im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, rect_narra, plot_bool)
    # if clouds -> skip the image
    if np.sum(im_cloud)/im_cloud.size > cloud_threshold:
        skipped_images[i] = True
        continue
    # rescale intensities
    im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
    im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)
    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
    # calculate NDWI
    im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], im_cloud, plot_bool)
    # edge detection
    wl_pix = sds.find_wl_contours(im_ndwi, im_cloud, min_contour_points, plot_bool)
    plt.figure()
    plt.imshow(im_ms_ps[:,:,[2,1,0]])
    # use k (not i) as the loop variable so the outer image index is preserved
    for k,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linewidth=2)
    plt.axis('image')
    plt.title('Detected water lines')
    plt.show()
    # convert from pixels to world coordinates
    wl_coords = sds.convert_pix2world(wl_pix, crs['crs_15m'])
    # convert to the output epsg spatial reference
    wl = sds.convert_epsg(wl_coords, crs['epsg_code'], output_epsg)
    # find the contour closest to Narrabeen beach (mean of minimum distances)
    sum_dist = np.zeros(len(wl))
    for k,contour in enumerate(wl):
        min_dist = np.zeros(len(pts_beach))
        for j,pt in enumerate(pts_beach):
            min_dist[j] = np.min(np.linalg.norm(contour - pt, axis=1))
        sum_dist[k] = np.sum(min_dist)/len(min_dist)
    try:
        wl_beach = wl[np.argmin(sum_dist)]
#        plt.figure()
#        plt.axis('equal')
#        plt.plot(pts_beach[:,0], pts_beach[:,1], 'ko')
#        plt.plot(wl_beach[:,0], wl_beach[:,1], 'r')
#        plt.show()
    except ValueError:
        # np.argmin raises ValueError when no contours were detected
        wl_beach = []
    # plot for QA
    plt.figure()
    plt.imshow(im_ms_ps[:,:,[2,1,0]])
    for k,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linewidth=2)
    if len(wl_beach) > 0:
        plt.plot(wl_pix[np.argmin(sum_dist)][:,1], wl_pix[np.argmin(sum_dist)][:,0], linewidth=3, color='w')
    plt.axis('image')
    plt.title('im ' + str(i) + ' : ' + datetime.strftime(datetime
              .fromtimestamp(meta['timestamp']/1000, tz=pytz.utc)
              .astimezone(pytz.timezone('Australia/Sydney')), '%Y-%m-%d %H:%M:%S %Z%z'))
    plt.show()
    # store the metadata of each image in a dict
    metadata['timestamp'].append(meta['timestamp'])
    metadata['date_acquired'].append(meta['date_acquired'])
    metadata['cloud_cover'].append(np.sum(im_cloud)/im_cloud.size)
    metadata['geom_rmse_model'].append(meta['geom_rmse_model'])
    metadata['gcp_model'].append(meta['gcp_model'])
    metadata['quality'].append(meta['quality'])
    metadata['sun_azimuth'].append(meta['sun_azimuth'])
    metadata['sun_elevation'].append(meta['sun_elevation'])
    # store the water line
    output_wl.append(wl_beach)
    print(i)
# generate datetimes
@ -128,8 +179,8 @@ data = metadata.copy()
data.update({'dt':dt})
data.update({'contours':output_wl})
#with open('data_2016.pkl', 'wb') as f:
#    pickle.dump(data, f)
#%% Load data
#with open('data_2016.pkl', 'rb') as f:
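# NOTE (editor): a self-contained illustration of the contour-selection rule
# added above (the contour minimising the mean of minimum distances to the
# digitised beach points wins); coordinates are made up:
beach_demo = np.array([[0.0, 0.0], [1.0, 0.0]])
contours_demo = [np.array([[0.0, 5.0], [1.0, 5.0]]),   # ~5 m offshore
                 np.array([[0.0, 0.5], [1.0, 0.5]])]   # ~0.5 m away -> selected
score = [np.mean([np.min(np.linalg.norm(c - p, axis=1)) for p in beach_demo])
         for c in contours_demo]
assert int(np.argmin(score)) == 1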

@ -0,0 +1,260 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 1 14:32:08 2018
@author: z5030440
Main code to extract shorelines from Landsat imagery
"""
# Preamble
import ee
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import pandas as pd
from datetime import datetime
import pickle
import pdb
import pytz
from pylab import ginput
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure
# my functions
import functions.utils as utils
import functions.sds as sds
np.seterr(all='ignore') # ignore divide-by-zero and nan warnings
ee.Initialize()
#%% Select images
# parameters
plot_bool = False # set to True to show the plots
prob_high = 99.9 # upper percentile used to clip and rescale pixel intensities
min_contour_points = 100 # minimum number of points in each detected water line
output_epsg = 28356 # GDA94 / MGA Zone 56
cloud_threshold = 0.7
# select collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.3473129272461,-33.69035274454718],
               [151.2820816040039,-33.68206818063878],
               [151.27281188964844,-33.74775138989556],
               [151.3425064086914,-33.75231878701767],
               [151.3473129272461,-33.69035274454718]]]
with open('data/narra_beach.pkl', 'rb') as f:
    pts_beach = pickle.load(f)
with open('data/idx_nogt.pkl', 'rb') as f:
    idx_nogt = pickle.load(f)
idx_nogt = np.array(idx_nogt)
#rect_narra = [[[151.301454, -33.700754],
#               [151.311453, -33.702075],
#               [151.307237, -33.739761],
#               [151.294220, -33.736329],
#               [151.301454, -33.700754]]]
# Dates
start_date = '2016-01-01'
end_date = '2016-12-31'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))#.filterDate(start_date, end_date)
n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')
#%% Extract shorelines
metadata = {'timestamp':[],
            'date_acquired':[],
            'cloud_cover':[],
            'geom_rmse_model':[],
            'gcp_model':[],
            'quality':[],
            'sun_azimuth':[],
            'sun_elevation':[]}
skipped_images = np.zeros((n_img,1)).astype(bool)
output_wl = []
# loop through all images
for i in range(n_img):
    # skip images with no ground-truth survey within the day threshold
    if np.isin(i, idx_nogt):
        continue
    # find each image in the ee database
    im = ee.Image(im_all[i].get('id'))
    # load the image as a np.array
    im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, rect_narra, plot_bool)
    # if clouds -> skip the image
    if np.sum(im_cloud)/im_cloud.size > cloud_threshold:
        skipped_images[i] = True
        continue
    # rescale intensities
    im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
    im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)
    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
    # calculate NDWI
    im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], im_cloud, plot_bool)
    # edge detection
    wl_pix = sds.find_wl_contours(im_ndwi, im_cloud, min_contour_points, plot_bool)
    # convert from pixels to world coordinates
    wl_coords = sds.convert_pix2world(wl_pix, crs['crs_15m'])
    # convert to the output epsg spatial reference
    wl = sds.convert_epsg(wl_coords, crs['epsg_code'], output_epsg)
    # find the contour closest to Narrabeen beach (mean of minimum distances)
    sum_dist = np.zeros(len(wl))
    for k,contour in enumerate(wl):
        min_dist = np.zeros(len(pts_beach))
        for j,pt in enumerate(pts_beach):
            min_dist[j] = np.min(np.linalg.norm(contour - pt, axis=1))
        sum_dist[k] = np.sum(min_dist)/len(min_dist)
    try:
        wl_beach = wl[np.argmin(sum_dist)]
#        plt.figure()
#        plt.axis('equal')
#        plt.plot(pts_beach[:,0], pts_beach[:,1], 'ko')
#        plt.plot(wl_beach[:,0], wl_beach[:,1], 'r')
#        plt.show()
    except ValueError:
        # np.argmin raises ValueError when no contours were detected
        wl_beach = []
    plt.figure()
    plt.imshow(im_ms_ps[:,:,[2,1,0]])
    for k,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linewidth=2)
    if len(wl_beach) > 0:
        plt.plot(wl_pix[np.argmin(sum_dist)][:,1], wl_pix[np.argmin(sum_dist)][:,0], linewidth=3, color='w')
    plt.axis('image')
    plt.title('im ' + str(i) + ' : ' + datetime.strftime(datetime
              .fromtimestamp(meta['timestamp']/1000, tz=pytz.utc)
              .astimezone(pytz.timezone('Australia/Sydney')), '%Y-%m-%d %H:%M:%S %Z%z'))
    plt.show()
    # manually validate the detection: clicking in the lower part of the figure
    # (image row > 300) rejects the image
    input_pt = np.array(ginput(1))
    if input_pt[0,1] > 300:
        skipped_images[i] = True
        continue
    # store the metadata of each image in a dict
    metadata['timestamp'].append(meta['timestamp'])
    metadata['date_acquired'].append(meta['date_acquired'])
    metadata['cloud_cover'].append(np.sum(im_cloud)/im_cloud.size)
    metadata['geom_rmse_model'].append(meta['geom_rmse_model'])
    metadata['gcp_model'].append(meta['gcp_model'])
    metadata['quality'].append(meta['quality'])
    metadata['sun_azimuth'].append(meta['sun_azimuth'])
    metadata['sun_elevation'].append(meta['sun_elevation'])
    # store the water line
    output_wl.append(wl_beach)
    print(i)
# generate datetimes
#fmt = '%Y-%m-%d %H:%M:%S %Z%z'
#au_tz = pytz.timezone('Australia/Sydney')
dt = []
t = metadata['timestamp']
for k in range(len(t)): dt.append(datetime.fromtimestamp(t[k]/1000, tz=pytz.utc))
# save outputs
data = metadata.copy()
data.update({'dt':dt})
data.update({'contours':output_wl})
with open('data_gt15d_32_56.pkl', 'wb') as f:
    pickle.dump(data, f)
#%% Load data
##with open('data_2016.pkl', 'rb') as f:
## data = pickle.load(f)
#
#
## load background image
#i = 0
#im = ee.Image(im_all[i].get('id'))
#im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, rect_narra, plot_bool)
#im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
#im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)
#im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)
#
#plt.figure()
#plt.imshow(im_ms_ps[:,:,[2,1,0]])
#plt.axis('image')
#plt.title('2016 shorelines')
#
#n = len(data['cloud_cover'])
#idx_best = []
## remove overlapping images, based on cloud cover
#for i in range(n):
# date_im = data['date_acquired'][i]
# idx = np.isin(data['date_acquired'], date_im)
# best = np.where(idx)[0][np.argmin(np.array(data['cloud_cover'])[idx])]
# if ~np.isin(best, idx_best):
# idx_best.append(best)
#
#point_narra = np.array([342500, 6266990])
#plt.figure()
#plt.axis('equal')
#plt.grid()
#cmap = cm.get_cmap('jet')
#colours = cmap(np.linspace(0, 1, num=len(idx_best)))
#for i, idx in enumerate(idx_best):
# for j in range(len(data['contours'][i])):
# if np.any(np.linalg.norm(data['contours'][i][j][:,[0,1]] - point_narra, axis=1) < 200):
# plt.plot(data['contours'][i][j][:,0], data['contours'][i][j][:,1],
# label=str(data['date_acquired'][i]),
# linewidth=2, color=colours[i,:])
#
#plt.legend()
#plt.show()
#
#pts_narra = sds.convert_epsg(pts_narra, output_epsg, 4326)
#
##kml.newlinestring(name="beach",
## coords = [(_[0], _[1]) for _ in pts_narra])
##kml.save("narra.kml")
#%%
#with open('data_gt15d_0_31.pkl', 'rb') as f:
# data1 = pickle.load(f)
#with open('data_gt15d_32_56.pkl', 'rb') as f:
# data2 = pickle.load(f)
#with open('data_gt15d_99_193.pkl', 'rb') as f:
# data3 = pickle.load(f)
#
#data = []
#data = data1.copy()
#for k,cat in enumerate(data.keys()):
# for j in range(len(data2[cat])):
# data[cat].append(data2[cat][j])
# for j in range(len(data3[cat])):
# data[cat].append(data3[cat][j])
#
#
#with open('data_gt_l8.pkl', 'wb') as f:
# pickle.dump(data, f)
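# NOTE (editor): sds.nd_index is not shown in this commit. A normalized-
# difference sketch, assuming it computes (b1 - b2)/(b1 + b2) and masks cloudy
# pixels (here b1 is the NIR band, b2 the green band, an NDWI-type water index):
def nd_index_sketch(b1, b2, cloud_mask):
    with np.errstate(divide='ignore', invalid='ignore'):
        im_nd = (b1 - b2)/(b1 + b2)
    im_nd[cloud_mask] = np.nan
    return im_nd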

@ -0,0 +1,136 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 20 16:15:51 2018
@author: z5030440
"""
import scipy.io as sio
import os
import ee
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
import pickle
import pdb
import pytz
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import skimage.morphology as morphology
import skimage.measure as measure
import sklearn.decomposition as decomposition
from scipy import spatial
# my functions
import functions.utils as utils
import functions.sds as sds
#plt.rcParams['axes.grid'] = True
au_tz = pytz.timezone('Australia/Sydney')
# load quadbike dates and convert from datenum to datetime
suffix = '.mat'
dir_name = os.getcwd()
file_name = os.path.join('data', 'quadbike_dates')
file_path = os.path.join(dir_name, file_name + suffix)
quad_dates = sio.loadmat(file_path)['dates']
dt_quad = []
for i in range(quad_dates.shape[0]):
    # au_tz.localize() avoids the pytz tzinfo= constructor pitfall
    dt_quad.append(au_tz.localize(datetime(quad_dates[i,0], quad_dates[i,1], quad_dates[i,2])))
# load satellite datetimes (in UTC) and convert to AEST time
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.3473129272461,-33.69035274454718],
               [151.2820816040039,-33.68206818063878],
               [151.27281188964844,-33.74775138989556],
               [151.3425064086914,-33.75231878701767],
               [151.3473129272461,-33.69035274454718]]]
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))
n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')
# extract datetimes from image metadata
dt_sat = [_['properties']['system:time_start'] for _ in im_all]
dt_sat = [datetime.fromtimestamp(_/1000, tz=pytz.utc) for _ in dt_sat]
dt_sat = [_.astimezone(au_tz) for _ in dt_sat]
# calculate the difference in days between each satellite date and each survey date
diff_days = [[(x - _).days for _ in dt_quad] for x in dt_sat]
day_thresh = 15
idx = [utils.find_indices(_, lambda e: abs(e) < day_thresh) for _ in diff_days]
dt_diff = []
idx_nogt = []
for i in range(n_img):
    if not idx[i]:
        idx_nogt.append(i)
        continue
    dt_diff.append({"sat dt": dt_sat[i],
                    "quad dt": [dt_quad[_] for _ in idx[i]],
                    "days diff": [diff_days[i][_] for _ in idx[i]]})
with open('idx_nogt.pkl', 'wb') as f:
    pickle.dump(idx_nogt, f)
#%%
dates_sat = mdates.date2num(dt_sat)
dates_quad = mdates.date2num(dt_quad)
plt.figure()
plt.plot_date(dates_sat, np.zeros((n_img,1)))
plt.plot_date(dates_quad, np.ones((len(dates_quad),1)))
plt.show()
data = pd.read_pickle('data_2016.pkl')
dt_sat = [_.astimezone(au_tz) for _ in data['dt']]
dn_sat = []
for i in range(len(dt_sat)): dn_sat.append(dt_sat[i].toordinal())
dn_sat = np.array(dn_sat)
dn_sur = []
# the survey (quadbike) dates are stored in dt_quad
for i in range(len(dt_quad)): dn_sur.append(dt_quad[i].toordinal())
dn_sur = np.array(dn_sur)
# day distances to the two closest surveys, plus year/month of each image
distances = np.zeros((len(dn_sat),4)).astype('int32')
indexes = np.zeros((len(dn_sat),2)).astype('int32')
for i in range(len(dn_sat)):
    distances[i,0] = np.sort(abs(dn_sat[i] - dn_sur))[0]
    distances[i,1] = np.sort(abs(dn_sat[i] - dn_sur))[1]
    distances[i,2] = dt_sat[i].year
    distances[i,3] = dt_sat[i].month
    indexes[i,0] = np.where(abs(dn_sat[i] - dn_sur) == np.sort(abs(dn_sat[i] - dn_sur))[0])[0][0]
    indexes[i,1] = np.where(abs(dn_sat[i] - dn_sur) == np.sort(abs(dn_sat[i] - dn_sur))[1])[0][0]
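# NOTE (editor): an equivalent vectorized sketch of the two-nearest-survey
# search above, using one stable argsort per satellite date (illustrative only):
order = np.argsort(np.abs(dn_sat[:, None] - dn_sur[None, :]), axis=1, kind='stable')
nearest_two = order[:, :2]  # same indices as indexes[:, 0] and indexes[:, 1]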
years = [2013, 2014, 2015, 2016]
months = mdates.MonthLocator()
days = mdates.DayLocator()
month_fmt = mdates.DateFormatter('%b')
f, ax = plt.subplots(4, 1)
for i, ca in enumerate(ax):
    ca.xaxis.set_major_locator(months)
    ca.xaxis.set_major_formatter(month_fmt)
    ca.xaxis.set_minor_locator(days)
    ca.set_ylabel(str(years[i]))
    for j in range(len(dt_sat)):
        if dt_sat[j].year == years[i]:
            ca.plot(dt_sat[j], 0, 'bo', markerfacecolor='b')
#f.subplots_adjust(hspace=0)
#plt.setp([a.get_xticklabels() for a in f.axes[:-1]], visible=False)
plt.plot(dt_quad, np.zeros([len(dt_quad),1]), 'bo')
plt.plot(dt_sat, np.ones([len(dt_sat),1]), 'ro')
plt.yticks([])
plt.show()