# -*- coding: utf-8 -*-
"""
Created on Thu Mar 1 14:32:08 2018

@author: z5030440

Main code to extract shorelines from Landsat imagery
"""

# Preamble
import ee
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import pandas as pd
from datetime import datetime
import pickle
import pdb
import pytz

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure

# my functions
import functions.utils as utils
import functions.sds as sds

np.seterr(all='ignore') # ignore divisions by 0 and nans
ee.Initialize()
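# note: this assumes Earth Engine credentials are already configured
# (run `earthengine authenticate` once beforehand if they are not)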

#%% Select images

# parameters
plot_bool = False # if you want the plots
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
cloud_threshold = 0.8 # skip images whose cloud cover fraction exceeds this

# select collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')

# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.3473129272461,-33.69035274454718],
               [151.2820816040039,-33.68206818063878],
               [151.27281188964844,-33.74775138989556],
               [151.3425064086914,-33.75231878701767],
               [151.3473129272461,-33.69035274454718]]]

with open('data/narra_beach.pkl', 'rb') as f:
    pts_beach = pickle.load(f)
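# assumption: pts_beach is an Nx2 array of beach coordinates expressed in the
# output_epsg projection (MGA zone 56), since it is compared directly against
# the converted contours further down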

#rect_narra = [[[151.301454, -33.700754],
#               [151.311453, -33.702075],
#               [151.307237, -33.739761],
#               [151.294220, -33.736329],
#               [151.301454, -33.700754]]]

# Dates
start_date = '2016-01-01'
end_date = '2016-12-31'

# filter by location and date
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra)).filterDate(start_date, end_date)

n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')
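# getInfo() pulls the collection description client-side: im_all is a list of
# dicts, one per image, with the asset id stored under the 'id' key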

#%% Extract shorelines

metadata = {'timestamp':[],
            'date_acquired':[],
            'cloud_cover':[],
            'geom_rmse_model':[],
            'gcp_model':[],
            'quality':[],
            'sun_azimuth':[],
            'sun_elevation':[]}
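# these keys presumably mirror the Landsat scene metadata returned by
# sds.read_eeimage (e.g. CLOUD_COVER, GEOMETRIC_RMSE_MODEL, SUN_AZIMUTH)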
skipped_images = np.zeros((n_img,1)).astype(bool)
output_wl = []

# loop through all images
for i in range(n_img):

    # find each image in ee database
    im = ee.Image(im_all[i].get('id'))
    # load image as np.array
    im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, rect_narra, plot_bool)

    # if clouds -> skip the image
    if sum(sum(im_cloud.astype(int)))/(im_cloud.shape[0]*im_cloud.shape[1]) > cloud_threshold:
        skipped_images[i] = True
        continue

    # rescale intensities
    im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
    im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)

    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)

    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
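    # only the visible bands overlap the 15 m panchromatic band, hence RGB is
    # sharpened while NIR/SWIR stay at native resolution; given the
    # sklearn.decomposition import above, sds.pansharpen is presumably PCA-based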

    # calculate NDWI
    im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], im_cloud, plot_bool)
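    # assuming sds.nd_index computes the usual normalized difference
    # (b1 - b2)/(b1 + b2), passing NIR (band 3) and green (band 1) yields an
    # NDWI-style index that separates water from land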

    # edge detection
    wl_pix = sds.find_wl_contours(im_ndwi, im_cloud, min_contour_points, plot_bool)
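    # likely implementation (an assumption from the imports above): threshold
    # the index image (e.g. Otsu from skimage.filters), trace iso-contours with
    # skimage.measure.find_contours, drop contours under min_contour_points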

    plt.figure()
    plt.imshow(im_ms_ps[:,:,[2,1,0]])
    # loop with k: reusing i here would clobber the outer image counter
    for k,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linewidth=2)
    plt.axis('image')
    plt.title('Detected water lines')
    plt.show()

    # convert from pixels to world coordinates
    wl_coords = sds.convert_pix2world(wl_pix, crs['crs_15m'])
    # convert to output epsg spatial reference
    wl = sds.convert_epsg(wl_coords, crs['epsg_code'], output_epsg)
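    # convert_pix2world presumably applies the affine georeferencing transform
    # of the 15 m grid, i.e. x = x0 + col*dx, y = y0 - row*dy (hence
    # crs['crs_15m']); convert_epsg then reprojects into GDA94 / MGA zone 56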

    # find contour closest to narrabeen beach
    sum_dist = np.zeros(len(wl))
    for k,contour in enumerate(wl):
        min_dist = np.zeros(len(pts_beach))
        for j,pt in enumerate(pts_beach):
            min_dist[j] = np.min(np.linalg.norm(contour - pt, axis=1))
        sum_dist[k] = np.sum(min_dist)/len(min_dist)
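    # sum_dist[k] is the mean, over all beach points, of each point's distance
    # to the nearest vertex of contour k; the contour minimising this value is
    # taken as the shoreline fronting Narrabeen beach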
    try:
        wl_beach = wl[np.argmin(sum_dist)]
#        plt.figure()
#        plt.axis('equal')
#        plt.plot(pts_beach[:,0], pts_beach[:,1], 'ko')
#        plt.plot(wl_beach[:,0], wl_beach[:,1], 'r')
#        plt.show()
    except ValueError:
        # np.argmin raises ValueError on an empty sum_dist (no contours found)
        wl_beach = []

    # plot for QA
    plt.figure()
    plt.imshow(im_ms_ps[:,:,[2,1,0]])
    for k,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linewidth=2)
    if len(wl_beach) > 0:
        plt.plot(wl_pix[np.argmin(sum_dist)][:,1], wl_pix[np.argmin(sum_dist)][:,0], linewidth=3, color='w')
    plt.axis('image')
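    # meta['timestamp'] is presumably ee's 'system:time_start', in milliseconds
    # since the Unix epoch, hence the /1000 before datetime.fromtimestamp()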
    plt.title('im ' + str(i) + ' : ' + datetime.strftime(datetime
              .fromtimestamp(meta['timestamp']/1000, tz=pytz.utc)
              .astimezone(pytz.timezone('Australia/Sydney')), '%Y-%m-%d %H:%M:%S %Z%z'))
    plt.show()

    # store metadata of each image in dict
    metadata['timestamp'].append(meta['timestamp'])
    metadata['date_acquired'].append(meta['date_acquired'])
    metadata['cloud_cover'].append(sum(sum(im_cloud.astype(int)))/(im_cloud.shape[0]*im_cloud.shape[1]))
    metadata['geom_rmse_model'].append(meta['geom_rmse_model'])
    metadata['gcp_model'].append(meta['gcp_model'])
    metadata['quality'].append(meta['quality'])
    metadata['sun_azimuth'].append(meta['sun_azimuth'])
    metadata['sun_elevation'].append(meta['sun_elevation'])
    # store water lines
    output_wl.append(wl_beach)

    print(i) # progress indicator

# generate datetimes
#fmt = '%Y-%m-%d %H:%M:%S %Z%z'
#au_tz = pytz.timezone('Australia/Sydney')
dt = []
t = metadata['timestamp']
for k in range(len(t)): dt.append(datetime.fromtimestamp(t[k]/1000, tz=pytz.utc))

# save outputs
data = metadata.copy()
data.update({'dt':dt})
data.update({'contours':output_wl})

#with open('data_2016.pkl', 'wb') as f:
#    pickle.dump(data, f)

#%% Load data

#with open('data_2016.pkl', 'rb') as f:
#    data = pickle.load(f)

# load background image
i = 0
im = ee.Image(im_all[i].get('id'))
im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, rect_narra, plot_bool)
im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)

plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
plt.axis('image')
plt.title('2016 shorelines')

n = len(data['cloud_cover'])
idx_best = []
# remove overlapping images, based on cloud cover
for i in range(n):
    date_im = data['date_acquired'][i]
    idx = np.isin(data['date_acquired'], date_im)
    best = np.where(idx)[0][np.argmin(np.array(data['cloud_cover'])[idx])]
    if best not in idx_best:
        idx_best.append(best)
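# for each acquisition date, idx flags every image sharing that date and the
# one with the lowest cloud cover is kept, so duplicate/overlapping scenes
# from the same day are reduced to a single entry in idx_best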

point_narra = np.array([342500, 6266990])
plt.figure()
plt.axis('equal')
plt.grid()
cmap = cm.get_cmap('jet')
colours = cmap(np.linspace(0, 1, num=len(idx_best)))
for i, idx in enumerate(idx_best):
    # index the data with idx (the retained image), not the loop counter i
    for j in range(len(data['contours'][idx])):
        if np.any(np.linalg.norm(data['contours'][idx][j][:,[0,1]] - point_narra, axis=1) < 200):
            plt.plot(data['contours'][idx][j][:,0], data['contours'][idx][j][:,1],
                     label=str(data['date_acquired'][idx]),
                     linewidth=2, color=colours[i,:])

plt.legend()
plt.show()