finalised new shoreline detection method

development
kvos 7 years ago
parent b99c2acaf3
commit c1c1e6aacb

Binary file not shown.

Binary file not shown.

@@ -84,11 +84,23 @@ input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
# [175.853956, -36.998749],
# [175.852115, -36.985414]]];
# Location (Duck)
polygon = [[[-75.766220, 36.195928],
[-75.748282, 36.196401],
[-75.738851, 36.173974],
[-75.763546, 36.174249],
[-75.766220, 36.195928]]];
#polygon = [[[-75.766220, 36.195928],
# [-75.748282, 36.196401],
# [-75.738851, 36.173974],
# [-75.763546, 36.174249],
# [-75.766220, 36.195928]]];
# Location (Broulee Island)
#polygon = [[[150.173557, -35.847138],
# [150.196164, -35.848064],
# [150.195143, -35.869967],
# [150.172779, -35.861760],
# [150.173557, -35.847138]]];
# Location (Rarotonga, Muri lagoon)
polygon = [[[-159.732071, -21.241348],
[-159.719820, -21.242892],
[-159.720006, -21.261134],
[-159.731592, -21.258875],
[-159.732071, -21.241348]]];
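# note: each polygon above is a closed ring of [longitude, latitude]
# vertices (first point repeated at the end), the coordinate convention
# expected by Earth Engine geometries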
# dates
start_date = '2013-01-01'
@@ -106,7 +118,10 @@ satname = 'L8'
#sitename = 'OLDBAR'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
sitename = 'DUCK'
#sitename = 'DUCK'
#sitename = 'BROULEE'
sitename = 'MURI'
suffix = '.tif'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)

@@ -848,7 +848,7 @@ def find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, plot_bool):
im_mwi_buffer = np.copy(im_mwi)
im_mwi_buffer[~im_buffer] = np.nan
contours_wi = measure.find_contours(im_wi_buffer, t_wi)
contours_mwi = measure.find_contours(im_mwi_buffer, t_mwi)
contours_mwi = measure.find_contours(im_mwi, t_mwi) # WARNING (on entire image)
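# note on the change above: contours_wi is still traced inside the
# sand/water buffer (im_wi_buffer), while contours_mwi is now traced over
# the whole mWI image, so its contours may fall outside the buffered
# shoreline region (hence the WARNING)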
if plot_bool:
@@ -861,9 +861,9 @@ def find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, plot_bool):
im[im_labels[:,:,k],2] = colours[k,2]
fig = plt.figure()
gs = gridspec.GridSpec(2, 2, width_ratios=[3, 1])
gs = gridspec.GridSpec(3, 3, height_ratios=[1, 1, 3])
ax1 = fig.add_subplot(gs[0,0])
ax1 = fig.add_subplot(gs[0,:])
vals = plt.hist(int_water[:,0], bins=100, label='water')
plt.hist(int_sand[:,0], bins=100, alpha=0.5, label='sand')
plt.hist(int_swash[:,0], bins=100, alpha=0.5, label='swash')
@@ -871,7 +871,7 @@ def find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, plot_bool):
plt.legend()
plt.title('Water Index NIR-G')
ax2 = fig.add_subplot(gs[1,0], sharex=ax1)
ax2 = fig.add_subplot(gs[1,:], sharex=ax1)
vals = plt.hist(int_water[:,1], bins=100, label='water')
plt.hist(int_sand[:,1], bins=100, alpha=0.5, label='sand')
plt.hist(int_swash[:,1], bins=100, alpha=0.5, label='swash')
@@ -879,12 +879,34 @@ def find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, plot_bool):
plt.legend()
plt.title('Modified Water Index SWIR-G')
ax3 = fig.add_subplot(gs[:,1])
ax3 = fig.add_subplot(gs[2,0])
plt.imshow(im)
# for i,contour in enumerate(contours_wi): plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='r')
for i,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
for i,contour in enumerate(contours_wi): plt.plot(contour[:, 1], contour[:, 0], linestyle='--', linewidth=1, color='w')
plt.grid(False)
plt.xticks([])
plt.yticks([])
plt.gcf().set_size_inches(17.99,7.55)
ax4 = fig.add_subplot(gs[2,1], sharex=ax3, sharey=ax3)
plt.imshow(im_display)
for i,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
for i,contour in enumerate(contours_wi): plt.plot(contour[:, 1], contour[:, 0], linestyle='--', linewidth=1, color='w')
plt.grid(False)
plt.xticks([])
plt.yticks([])
ax5 = fig.add_subplot(gs[2,2], sharex=ax3, sharey=ax3)
plt.imshow(im_mwi, cmap='seismic')
for i,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
for i,contour in enumerate(contours_wi): plt.plot(contour[:, 1], contour[:, 0], linestyle='--', linewidth=1, color='w')
plt.grid(False)
plt.xticks([])
plt.yticks([])
# plt.gcf().set_size_inches(17.99,7.55)
mng = plt.get_current_fig_manager()
mng.window.showMaximized()
plt.gcf().set_tight_layout(True)
plt.draw()
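# layout of the new 3x3 grid (height_ratios=[1, 1, 3]): row 0 spans the
# NIR-G histogram, row 1 the SWIR-G histogram, and row 2 holds three
# linked panels: classified overlay, RGB image and the mWI index
# ('seismic' colormap), each with both contour sets drawn on top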

@@ -59,3 +59,11 @@ def find_indices(lst, condition):
def reject_outliers(data, m=2):
"rejects outliers in a numpy array"
return data[abs(data - np.mean(data)) < m * np.std(data)]
def duplicates_dict(lst):
"return duplicates and indices"
# nested function
def duplicates(lst, item):
return [i for i, x in enumerate(lst) if x == item]
return dict((x, duplicates(lst, x)) for x in set(lst) if lst.count(x) > 1)
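# minimal usage sketch for the helpers above:
#   duplicates_dict(['a', 'b', 'a', 'c', 'b'])
#   -> {'a': [0, 2], 'b': [1, 4]}   (key order may vary; 'c' occurs once)
#   reject_outliers(np.array([1, 1, 1, 1, 10]))
#   -> array([1, 1, 1, 1])          (10 is not within 2 standard deviations)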

@@ -82,12 +82,12 @@ for i in idx_nocloud:
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)] # k + 1: use the comprehension variable, not the outer loop index i
im_pan = np.stack(bands, 2)[:,:,0]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]

@@ -95,14 +95,14 @@ for i in range(N):
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
nrow = im_pan.shape[0]
ncol = im_pan.shape[1]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]

@@ -0,0 +1,228 @@
# -*- coding: utf-8 -*-
#==========================================================#
# Run Neural Network on image to extract sandy pixels
#==========================================================#
# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
from matplotlib import gridspec
from datetime import datetime, timedelta
import pytz
import ee
import pdb
import time
import pandas as pd
# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from scipy import ndimage
import imageio
# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib
# import own modules
import functions.utils as utils
import functions.sds as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()
# parameters
cloud_thresh = 0.2 # threshold for cloud cover
plot_bool = False # if you want the plots
prob_high = 100 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
buffer_size = 10 # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 10 # number of pixels in a beach (pixel classification)
# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA_all'
#sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'OLDBAR_inlet'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
#sitename = 'DUCK'
#sitename = 'BROULEE'
sitename = 'MURI'
# Load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
daysall = (datetime(2019,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds() # full 2013-2019 span in seconds (despite the name)
# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)
# initialise some variables
idx_skipped = []
idx_nocloud = []
n_features = 10
train_pos = np.nan*np.ones((1,n_features))
train_neg = np.nan*np.ones((1,n_features))
columns = ('B','G','R','NIR','SWIR','Pan','WI','VI','BR', 'mWI', 'class')
#%%
for i in range(N):
# read pan image
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
nrow = im_pan.shape[0]
ncol = im_pan.shape[1]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]
cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
order=0, preserve_range=True,
mode='constant').astype('bool_')
# resize the image using bilinear interpolation (order 1)
im_ms = transform.resize(im_ms,(im_pan.shape[0], im_pan.shape[1]),
order=1, preserve_range=True, mode='constant')
# check if -inf or nan values and add to cloud mask
im_inf = np.isin(im_ms[:,:,0], -np.inf)
im_nan = np.isnan(im_ms[:,:,0])
cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
# skip if cloud cover is more than the threshold
cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
if cloud_cover > cloud_thresh:
print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
idx_skipped.append(i)
continue
idx_nocloud.append(i)
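# note: cloud_cover is the fraction of flagged pixels; np.mean(cloud_mask)
# would compute the same value in one step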
# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
# add down-sized bands for NIR and SWIR (since pansharpening is not possible)
im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
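# im_ms_ps band order is now [B, G, R, NIR, SWIR]: indices 0-2 are the
# pansharpened visible bands, 3-4 the bilinearly resized NIR and SWIR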
im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)
# if there are no sand pixels, skip the image (maybe later change the detection method with old method)
if sum(sum(im_labels[:,:,0])) == 0 :
print('skip ' + str(i) + ' - no sand')
idx_skipped.append(i)
continue
contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, False)
im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
im = np.copy(im_display)
# define colours for plot
colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
for k in range(0,im_labels.shape[2]):
im[im_labels[:,:,k],0] = colours[k,0]
im[im_labels[:,:,k],1] = colours[k,1]
im[im_labels[:,:,k],2] = colours[k,2]
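# im_labels stacks one boolean mask per class, so the overlay paints sand
# orange, swash/whitewater pale cyan and water blue, matching the legend
# patches created further down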
# fig = plt.figure()
# plt.suptitle(date_im, fontsize=17, fontweight='bold')
# ax1 = plt.subplot(121)
# plt.imshow(im_display)
# plt.axis('off')
# ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
# plt.imshow(im)
# plt.axis('off')
# plt.gcf().set_size_inches(17.99,7.55)
# plt.tight_layout()
# orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
# white_patch = mpatches.Patch(color=[204/255,1,1], label='swash/whitewater')
# blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
# plt.legend(handles=[orange_patch,white_patch,blue_patch], bbox_to_anchor=(0.95, 0.2))
# plt.draw()
date_im = timestamps_sorted[i].strftime('%d %b %Y')
daysnow = (timestamps_sorted[i] - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
fig = plt.figure()
gs = gridspec.GridSpec(2, 2, height_ratios=[1, 20])
ax1 = fig.add_subplot(gs[0,:])
plt.plot(0,0,'ko',daysall,0,'ko')
plt.plot([0,daysall],[0,0],'k-')
plt.plot(daysnow,0,'ro')
plt.text(0,0.05,'2013')
plt.text(daysall,0.05,'2019')
plt.plot((datetime(2014,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2015,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2016,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2017,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.text((datetime(2014,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2014')
plt.text((datetime(2015,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2015')
plt.text((datetime(2016,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2016')
plt.text((datetime(2017,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2017')
plt.text((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2018')
plt.axis('off')
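# equivalent sketch for the year ticks above, written as a loop:
#   t0 = datetime(2013,1,1,tzinfo=pytz.utc)
#   for yr in range(2014, 2019):
#       tsec = (datetime(yr,1,1,tzinfo=pytz.utc) - t0).total_seconds()
#       plt.plot(tsec, 0, 'ko', markersize=3)
#       plt.text(tsec, 0.05, str(yr))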
ax2 = fig.add_subplot(gs[1,0])
plt.imshow(im_display)
plt.axis('off')
plt.title(date_im, fontsize=17, fontweight='bold')
ax3 = fig.add_subplot(gs[1,1])
plt.imshow(im)
for l,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
plt.axis('off')
orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
white_patch = mpatches.Patch(color=[204/255,1,1], label='swash/whitewater')
blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
black_line = mlines.Line2D([],[],color='k',linestyle='-', label='shoreline')
plt.legend(handles=[orange_patch,white_patch,blue_patch, black_line], bbox_to_anchor=(0.95, 0.2))
plt.gcf().set_size_inches(17.99,7.55)
plt.gcf().set_tight_layout(True)
plt.draw()
plt.savefig(os.path.join(filepath,'plots_classif', file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] + '.jpg'), dpi = 300)
plt.close()
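# the slice in savefig above assumes pan filenames of the form
# '<satname>_<sitename>_YYYY-MM-DD...' and pulls out the 10-character
# date to name the figure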
# create gif
images = []
filenames = os.listdir(os.path.join(filepath, 'plots_classif'))
with imageio.get_writer(sitename + '.gif', mode='I', duration=0.4) as writer:
for filename in sorted(filenames): # sort so frames run in date order (os.listdir gives no guaranteed order)
image = imageio.imread(os.path.join(filepath,'plots_classif',filename))
writer.append_data(image)
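# note: imageio's GIF writer interprets duration as seconds per frame,
# so duration=0.4 plays at 2.5 frames per second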

@@ -0,0 +1,193 @@
# -*- coding: utf-8 -*-
#==========================================================#
# Run Neural Network on image to extract sandy pixels
#==========================================================#
# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
from matplotlib import gridspec
from datetime import datetime, timedelta
import pytz
import ee
import pdb
import time
import pandas as pd
# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from scipy import ndimage
import imageio
# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib
# import own modules
import functions.utils as utils
import functions.sds as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()
# parameters
cloud_thresh = 0.5 # threshold for cloud cover
plot_bool = False # if you want the plots
prob_high = 100 # upper probability to clip and rescale pixel intensity
min_contour_points = 30 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
buffer_size = 10 # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 10 # number of pixels in a beach (pixel classification)
# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA_all'
#sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'OLDBAR_inlet'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
#sitename = 'DUCK'
#sitename = 'BROULEE'
sitename = 'MURI2'
# Load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
daysall = (datetime(2019,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)
# initialise some variables
idx_skipped = []
idx_nocloud = []
n_features = 10
train_pos = np.nan*np.ones((1,n_features))
train_neg = np.nan*np.ones((1,n_features))
columns = ('B','G','R','NIR','SWIR','Pan','WI','VI','BR', 'mWI', 'class')
#%%
for i in range(N):
# read pan image
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
nrow = im_pan.shape[0]
ncol = im_pan.shape[1]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]
cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
order=0, preserve_range=True,
mode='constant').astype('bool_')
# resize the image using bilinear interpolation (order 1)
im_ms = transform.resize(im_ms,(im_pan.shape[0], im_pan.shape[1]),
order=1, preserve_range=True, mode='constant')
# check if -inf or nan values and add to cloud mask
im_inf = np.isin(im_ms[:,:,0], -np.inf)
im_nan = np.isnan(im_ms[:,:,0])
cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
# skip if cloud cover is more than the threshold
cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
if cloud_cover > cloud_thresh:
print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
idx_skipped.append(i)
continue
idx_nocloud.append(i)
# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
# add down-sized bands for NIR and SWIR (since pansharpening is not possible)
im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
# extract shorelines (old method)
im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)
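# the old method thresholds a normalised-difference water index computed
# from the NIR (index 3) and green (index 1) bands and traces its
# contours directly, without the neural-network pixel classification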
im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
date_im = timestamps_sorted[i].strftime('%d %b %Y')
daysnow = (timestamps_sorted[i] - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
fig = plt.figure()
gs = gridspec.GridSpec(2, 2, height_ratios=[1, 20])
ax1 = fig.add_subplot(gs[0,:])
plt.plot(0,0,'ko',daysall,0,'ko')
plt.plot([0,daysall],[0,0],'k-')
plt.plot(daysnow,0,'ro')
plt.text(0,0.05,'2013')
plt.text(daysall,0.05,'2019')
plt.plot((datetime(2014,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2015,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2016,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2017,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.text((datetime(2014,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2014')
plt.text((datetime(2015,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2015')
plt.text((datetime(2016,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2016')
plt.text((datetime(2017,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2017')
plt.text((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2018')
plt.axis('off')
# ax2 = fig.add_subplot(gs[1,0])
# plt.imshow(im_display)
# plt.axis('off')
# plt.title(date_im, fontsize=17, fontweight='bold')
ax3 = fig.add_subplot(gs[1,:])
plt.imshow(im_display)
for l,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
plt.title(date_im, fontsize=17, fontweight='bold')
plt.axis('off')
plt.gcf().set_size_inches(5.34,9.18)
plt.gcf().set_tight_layout(True)
plt.draw()
plt.savefig(os.path.join(filepath,'plots_classif', file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] + '.jpg'), dpi = 300)
plt.close()
# create gif
images = []
filenames = os.listdir(os.path.join(filepath, 'plots_classif'))
with imageio.get_writer(sitename + '_final.gif', mode='I', duration=0.6) as writer:
for filename in sorted(filenames): # sort so frames run in date order (os.listdir gives no guaranteed order)
image = imageio.imread(os.path.join(filepath,'plots_classif',filename))
writer.append_data(image)

@@ -0,0 +1,227 @@
# -*- coding: utf-8 -*-
#==========================================================#
# Run Neural Network on image to extract sandy pixels
#==========================================================#
# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
from matplotlib import gridspec
from datetime import datetime, timedelta
import pytz
import ee
import pdb
import time
import pandas as pd
# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from scipy import ndimage
import imageio
# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib
# import own modules
import functions.utils as utils
import functions.sds as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()
# parameters
cloud_thresh = 0.2 # threshold for cloud cover
plot_bool = False # if you want the plots
prob_high = 100 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
buffer_size = 10 # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 20 # number of pixels in a beach (pixel classification)
# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA_all'
sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'OLDBAR_inlet'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
#sitename = 'DUCK'
#sitename = 'BROULEE'
# Load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
daysall = (datetime(2019,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)
# initialise some variables
idx_skipped = []
idx_nocloud = []
n_features = 10
train_pos = np.nan*np.ones((1,n_features))
train_neg = np.nan*np.ones((1,n_features))
columns = ('B','G','R','NIR','SWIR','Pan','WI','VI','BR', 'mWI', 'class')
#%%
for i in range(N):
# read pan image
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
nrow = im_pan.shape[0]
ncol = im_pan.shape[1]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]
cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
order=0, preserve_range=True,
mode='constant').astype('bool_')
# resize the image using bilinear interpolation (order 1)
im_ms = transform.resize(im_ms,(im_pan.shape[0], im_pan.shape[1]),
order=1, preserve_range=True, mode='constant')
# check if -inf or nan values and add to cloud mask
im_inf = np.isin(im_ms[:,:,0], -np.inf)
im_nan = np.isnan(im_ms[:,:,0])
cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
# skip if cloud cover is more than the threshold
cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
if cloud_cover > cloud_thresh:
print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
idx_skipped.append(i)
continue
idx_nocloud.append(i)
# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
# add down-sized bands for NIR and SWIR (since pansharpening is not possible)
im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)
# if there are no sand pixels, skip the image (maybe later change the detection method with old method)
if sum(sum(im_labels[:,:,0])) == 0 :
print('skip ' + str(i) + ' - no sand')
idx_skipped.append(i)
continue
contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, False)
im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
im = np.copy(im_display)
# define colours for plot
colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
for k in range(0,im_labels.shape[2]):
im[im_labels[:,:,k],0] = colours[k,0]
im[im_labels[:,:,k],1] = colours[k,1]
im[im_labels[:,:,k],2] = colours[k,2]
# fig = plt.figure()
# plt.suptitle(date_im, fontsize=17, fontweight='bold')
# ax1 = plt.subplot(121)
# plt.imshow(im_display)
# plt.axis('off')
# ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
# plt.imshow(im)
# plt.axis('off')
# plt.gcf().set_size_inches(17.99,7.55)
# plt.tight_layout()
# orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
# white_patch = mpatches.Patch(color=[204/255,1,1], label='swash/whitewater')
# blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
# plt.legend(handles=[orange_patch,white_patch,blue_patch], bbox_to_anchor=(0.95, 0.2))
# plt.draw()
date_im = timestamps_sorted[i].strftime('%d %b %Y')
daysnow = (timestamps_sorted[i] - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
fig = plt.figure()
gs = gridspec.GridSpec(2, 2, height_ratios=[1, 20])
ax1 = fig.add_subplot(gs[0,:])
plt.plot(0,0,'ko',daysall,0,'ko')
plt.plot([0,daysall],[0,0],'k-')
plt.plot(daysnow,0,'ro')
plt.text(0,0.05,'2013')
plt.text(daysall,0.05,'2019')
plt.plot((datetime(2014,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2015,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2016,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2017,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.text((datetime(2014,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2014')
plt.text((datetime(2015,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2015')
plt.text((datetime(2016,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2016')
plt.text((datetime(2017,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2017')
plt.text((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2018')
plt.axis('off')
# ax2 = fig.add_subplot(gs[1,0])
# plt.imshow(im_display)
# plt.axis('off')
# plt.title(date_im, fontsize=17, fontweight='bold')
ax3 = fig.add_subplot(gs[1,:])
plt.imshow(im)
for l,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
plt.axis('off')
orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
white_patch = mpatches.Patch(color=[204/255,1,1], label='swash/whitewater')
blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
black_line = mlines.Line2D([],[],color='k',linestyle='--', label='shoreline')
plt.legend(handles=[orange_patch,white_patch,blue_patch, black_line], bbox_to_anchor=(0.6, 0.6))
plt.title(date_im, fontsize=17, fontweight='bold')
plt.gcf().set_size_inches(5.34,9.18)
plt.gcf().set_tight_layout(True)
plt.draw()
plt.savefig(os.path.join(filepath,'plots_classif', file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] + '.jpg'), dpi = 300)
plt.close()
# create gif
images = []
filenames = os.listdir(os.path.join(filepath, 'plots_classif'))
with imageio.get_writer(sitename + '.gif', mode='I', duration=0.4) as writer:
for filename in sorted(filenames): # sort so frames run in date order (os.listdir gives no guaranteed order)
image = imageio.imread(os.path.join(filepath,'plots_classif',filename))
writer.append_data(image)

@@ -0,0 +1,229 @@
# -*- coding: utf-8 -*-
#==========================================================#
# Run Neural Network on image to extract sandy pixels
#==========================================================#
# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
from matplotlib import gridspec
from datetime import datetime, timedelta
import pytz
import ee
import pdb
import time
import pandas as pd
# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from scipy import ndimage
import imageio
# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib
# import own modules
import functions.utils as utils
import functions.sds as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()
# parameters
cloud_thresh = 0.2 # threshold for cloud cover
plot_bool = False # if you want the plots
prob_high = 100 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
buffer_size = 10 # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 50 # number of pixels in a beach (pixel classification)
# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
sitename = 'NARRA_all'
#sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'OLDBAR_inlet'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
#sitename = 'DUCK'
#sitename = 'BROULEE'
# Load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
daysall = (datetime(2019,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)
# initialise some variables
idx_skipped = []
idx_nocloud = []
n_features = 10
train_pos = np.nan*np.ones((1,n_features))
train_neg = np.nan*np.ones((1,n_features))
columns = ('B','G','R','NIR','SWIR','Pan','WI','VI','BR', 'mWI', 'class')
#%%
for i in range(1): # single-image debug run; the index is hard-coded below
i = 156 # open (96 close)
# read pan image
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
nrow = im_pan.shape[0]
ncol = im_pan.shape[1]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]
cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
order=0, preserve_range=True,
mode='constant').astype('bool_')
# resize the image using bilinear interpolation (order 1)
im_ms = transform.resize(im_ms,(im_pan.shape[0], im_pan.shape[1]),
order=1, preserve_range=True, mode='constant')
# check if -inf or nan values and add to cloud mask
im_inf = np.isin(im_ms[:,:,0], -np.inf)
im_nan = np.isnan(im_ms[:,:,0])
cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
# skip if cloud cover is more than the threshold
cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
if cloud_cover > cloud_thresh:
print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
idx_skipped.append(i)
continue
idx_nocloud.append(i)
# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
# add down-sized bands for NIR and SWIR (since pansharpening is not possible)
im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)
# if there are no sand pixels, skip the image (maybe later change the detection method with old method)
if sum(sum(im_labels[:,:,0])) == 0 :
print('skip ' + str(i) + ' - no sand')
idx_skipped.append(i)
continue
contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, False)
im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
im = np.copy(im_display)
# define colours for plot
colours = np.array([[1,128/255,0/255],[0,0,204/255],[0,0,204/255]])
for k in range(0,im_labels.shape[2]):
im[im_labels[:,:,k],0] = colours[k,0]
im[im_labels[:,:,k],1] = colours[k,1]
im[im_labels[:,:,k],2] = colours[k,2]
# fig = plt.figure()
# plt.suptitle(date_im, fontsize=17, fontweight='bold')
# ax1 = plt.subplot(121)
# plt.imshow(im_display)
# plt.axis('off')
# ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
# plt.imshow(im)
# plt.axis('off')
# plt.gcf().set_size_inches(17.99,7.55)
# plt.tight_layout()
# orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
# white_patch = mpatches.Patch(color=[204/255,1,1], label='swash/whitewater')
# blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
# plt.legend(handles=[orange_patch,white_patch,blue_patch], bbox_to_anchor=(0.95, 0.2))
# plt.draw()
date_im = timestamps_sorted[i].strftime('%d %b %Y')
daysnow = (timestamps_sorted[i] - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
fig = plt.figure()
gs = gridspec.GridSpec(2, 2, height_ratios=[1, 20])
ax1 = fig.add_subplot(gs[0,:])
plt.plot(0,0,'ko',daysall,0,'ko')
plt.plot([0,daysall],[0,0],'k-')
plt.plot(daysnow,0,'ro')
plt.text(0,0.05,'2013')
plt.text(daysall,0.05,'2019')
plt.plot((datetime(2014,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2015,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2016,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2017,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.plot((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
plt.text((datetime(2014,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2014')
plt.text((datetime(2015,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2015')
plt.text((datetime(2016,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2016')
plt.text((datetime(2017,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2017')
plt.text((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2018')
plt.axis('off')
ax2 = fig.add_subplot(gs[1,0])
plt.imshow(im_display)
plt.axis('off')
plt.title(date_im, fontsize=17, fontweight='bold')
ax3 = fig.add_subplot(gs[1,1], sharex=ax2, sharey=ax2)
plt.imshow(im)
for l,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
plt.axis('off')
orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
black_line = mlines.Line2D([],[],color='k',linestyle='--', label='water line')
plt.legend(handles=[orange_patch,blue_patch, black_line], bbox_to_anchor=(0.6, 0.6))
# plt.title(date_im, fontsize=17, fontweight='bold')
plt.gcf().set_size_inches(11.38, 7.51)
plt.gcf().set_tight_layout(True)
plt.draw()
# plt.savefig(os.path.join(filepath,'plots_classif', file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] + '.jpg'), dpi = 300)
# plt.close()
# create gif
#images = []
#filenames = os.listdir(os.path.join(filepath, 'plots_classif'))
#with imageio.get_writer(sitename + '.gif', mode='I', duration=0.4) as writer:
# for filename in filenames:
# image = imageio.imread(os.path.join(filepath,'plots_classif',filename))
# writer.append_data(image)

Binary file not shown.

@@ -84,12 +84,12 @@ for i in range(N):
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]
@@ -129,7 +129,7 @@ for i in range(N):
idx_skipped.append(i)
continue
else:
# del shorelines[idx_samedate]
del shorelines[idx_samedate]
del t[idx_samedate]
del cloud_cover_ts[idx_samedate]
del date_acquired_ts[idx_samedate]

@@ -16,6 +16,7 @@ from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
from shapely.geometry import LineString
# image processing modules
import skimage.filters as filters
@@ -42,12 +43,15 @@ plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()
# parameters
cloud_thresh = 0.3 # threshold for cloud cover
cloud_thresh = 0.2 # threshold for cloud cover
plot_bool = False # if you want the plots
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
buffer_size = 7 # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 50 # number of pixels in a beach (pixel classification)
min_beach_size = 20 # number of pixels in a beach (pixel classification)
dist_ref = 100 # max distance (in metres, output_epsg units) from a reference point for keeping shoreline points
min_length_wl = 300 # minimum contour perimeter (in metres) to be kept as shoreline
manual = False
# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
@@ -64,6 +68,13 @@ with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints' + '.pkl'), 'rb') as f:
refpoints = pickle.load(f)
try:
with open(os.path.join(filepath, sitename + '_skipped_new' + '.pkl'), 'rb') as f:
idx_skipped = pickle.load(f)
except FileNotFoundError: # no skip-list saved yet
idx_skipped = []
manual = True
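# if a previously saved skip-list is found, the loop below reuses it and
# runs unattended; otherwise manual is set to True and each detection is
# validated interactively with ginput further down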
# sort timestamps and georef accuracy (downloaded images are sorted by date in directory)
timestamps_sorted = sorted(timestamps)
idx_sorted = sorted(range(len(timestamps)), key=timestamps.__getitem__)
@@ -80,19 +91,20 @@ N = len(file_names_pan)
cloud_cover_ts = []
date_acquired_ts = []
acc_georef_ts = []
idx_skipped = []
idx_nocloud = []
t = []
shorelines = []
idx_keep = []
#%%
for i in range(N):
if not manual: # note: '~manual' would be a bitwise NOT, which is truthy for any bool
if i in idx_skipped:
continue
# read pan image
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
nrows = im_pan.shape[0]
ncols = im_pan.shape[1]
@@ -100,7 +112,7 @@ for i in range(N):
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
@@ -120,40 +132,11 @@ for i in range(N):
# calculate cloud cover and skip image if too high
cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
if manual:
if cloud_cover > cloud_thresh:
print('skip ' + str(i) + ' - cloudy (' + str(cloud_cover) + ')')
print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
idx_skipped.append(i)
continue
idx_nocloud.append(i)
# check if image for that date already exists and choose the best in terms of cloud cover and georeferencing
if file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] in date_acquired_ts:
# find the index of the image that is repeated
idx_samedate = utils.find_indices(date_acquired_ts, lambda e : e == file_names_pan[i][9:19])
idx_samedate = idx_samedate[0]
print('cloud cover ' + str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
print('acc georef ' + str(acc_georef_sorted[i]) + ' - ' + str(acc_georef_ts[idx_samedate]))
# keep image with less cloud cover or best georeferencing accuracy
if cloud_cover < cloud_cover_ts[idx_samedate] - 0.01:
skip = False
elif acc_georef_sorted[i] < acc_georef_ts[idx_samedate]:
skip = False
else:
skip = True
if skip:
print('skip ' + str(i) + ' - repeated')
idx_skipped.append(i)
continue
else:
# del shorelines[idx_samedate]
del t[idx_samedate]
del cloud_cover_ts[idx_samedate]
del date_acquired_ts[idx_samedate]
del acc_georef_ts[idx_samedate]
print('keep ' + str(i) + ' - deleted ' + str(idx_samedate))
# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
@@ -164,20 +147,119 @@ for i in range(N):
# classify image in 4 classes (sand, whitewater, water, other) with NN classifier
im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)
idx_keep.append(i)
# # manually validate classification
# pt_in = np.array(ginput(n=1, timeout=1000))
# if pt_in[0][1] > nrows/2:
# print('skip ' + str(i) + ' - wrong classification')
# idx_skipped.append(i)
# continue
# if there are no sand pixels, skip the image (maybe later change the detection method with old method)
if manual:
if sum(sum(im_labels[:,:,0])) == 0 :
print('skip ' + str(i) + ' - no sand')
idx_skipped.append(i)
continue
# extract shorelines (new method)
contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, True)
contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, plot_bool)
plt.figure()
im = np.copy(im_display)
# define colours for plot
colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
for k in range(0,im_labels.shape[2]):
im[im_labels[:,:,k],0] = colours[k,0]
im[im_labels[:,:,k],1] = colours[k,1]
im[im_labels[:,:,k],2] = colours[k,2]
plt.imshow(im)
for k,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
mng = plt.get_current_fig_manager()
mng.window.showMaximized()
plt.tight_layout()
plt.draw()
# manually validate detection
if manual:
pt_in = np.array(ginput(n=1, timeout=1000))
plt.close()
if pt_in[0][1] > nrows/2:
print('skip ' + str(i) + ' - wrong detection')
idx_skipped.append(i)
continue
else:
plt.close()
# remove contour points that are around clouds (nan values)
for k, contour in enumerate(contours_mwi):
if np.any(np.isnan(contour)):
index_nan = np.where(np.isnan(contour))[0]
contours_mwi[k] = np.delete(contour, index_nan, axis=0) # write back into the list; rebinding 'contour' alone is lost
# convert from pixels to world coordinates
wl_coords = sds.convert_pix2world(contours_mwi, georef)
# convert to output epsg spatial reference
wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)
# remove contours that have a perimeter < min_length_wl as usually they are not shoreline
wl_good = []
for l, wls in enumerate(wl):
coords = [(wls[k,0], wls[k,1]) for k in range(len(wls))]
a = LineString(coords) # shapely LineString structure
if a.length >= min_length_wl:
wl_good.append(wls)
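# e.g. LineString([(0, 0), (0, 300)]).length == 300.0; since output_epsg
# coordinates are in metres, min_length_wl = 300 keeps only contours at
# least 300 m long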
# pre-process points (list of arrays to single array of points)
x_points = np.array([])
y_points = np.array([])
for k in range(len(wl_good)):
x_points = np.append(x_points,wl_good[k][:,0])
y_points = np.append(y_points,wl_good[k][:,1])
wl_good = np.transpose(np.array([x_points,y_points]))
# only select points around Narrabeen beach (refpoints given)
temp = np.zeros((len(wl_good))).astype(bool)
for k in range(len(refpoints)):
temp = np.logical_or(np.linalg.norm(wl_good - refpoints[k,[0,1]], axis=1) < dist_ref, temp)
wl_final = wl_good[temp]
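# temp flags every point within dist_ref (100 m) of at least one
# reference point, so wl_final keeps only the shoreline segments along
# Narrabeen beach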
# plt.figure()
# plt.axis('equal')
# plt.plot(wl_final[:,0],wl_final[:,1],'k.')
# plt.draw()
# save data
shorelines.append(wl_final)
t.append(timestamps_sorted[i])
cloud_cover_ts.append(cloud_cover)
acc_georef_ts.append(acc_georef_sorted[i])
date_acquired_ts.append(file_names_pan[i][9:19])
output = {'t':t, 'shorelines':shorelines, 'cloud_cover':cloud_cover_ts, 'acc_georef':acc_georef_ts}
#with open(os.path.join(filepath, sitename + '_output_new' + '.pkl'), 'wb') as f:
# pickle.dump(output, f)
#
#with open(os.path.join(filepath, sitename + '_skipped_new' + '.pkl'), 'wb') as f:
# pickle.dump(idx_skipped, f)
# plt.figure()
# plt.axis('equal')
# plt.plot(refpoints[:,0], refpoints[:,1], 'ko')
# plt.plot(all_points[temp,0], all_points[temp,1], 'go')
# plt.plot(all_points[~temp,0], all_points[~temp,1], 'ro')
# plt.draw()
# extract shorelines (old method)
# im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
# wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)
# plt.figure()
# plt.imshow(im_display)
# for k,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
# for k,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linestyle='--', linewidth=1, color='w')
# plt.draw()

@@ -0,0 +1,284 @@
# -*- coding: utf-8 -*-
#==========================================================#
# Extract shorelines from Landsat images
#==========================================================#
# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import ee
import pdb
# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
from shapely.geometry import LineString
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib
# import own modules
import functions.utils as utils
import functions.sds as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()
# parameters
cloud_thresh = 0.5 # threshold for cloud cover
plot_bool = False # if you want the plots
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
buffer_size = 7 # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 20 # number of pixels in a beach (pixel classification)
dist_ref = 100 # max distance (in metres, output_epsg units) from a reference point for keeping shoreline points
min_length_wl = 300 # minimum contour perimeter (in metres) to be kept as shoreline
# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
sitename = 'NARRA'
#sitename = 'OLDBAR'
# Load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
timestamps = pickle.load(f)
with open(os.path.join(filepath, sitename + '_accuracy_georef' + '.pkl'), 'rb') as f:
acc_georef = pickle.load(f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints2' + '.pkl'), 'rb') as f:
refpoints = pickle.load(f)
# sort timestamps and georef accuracy (downloaded images are sorted by date in directory)
timestamps_sorted = sorted(timestamps)
idx_sorted = sorted(range(len(timestamps)), key=timestamps.__getitem__)
acc_georef_sorted = [acc_georef[j] for j in idx_sorted]
# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)
# initialise some variables
cloud_cover_ts = []
date_acquired_ts = []
acc_georef_ts = []
idx_skipped = []
idx_nocloud = []
t = []
shorelines = []
#%%
for i in range(N):
# read pan image
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
nrows = im_pan.shape[0]
ncols = im_pan.shape[1]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]
cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
order=0, preserve_range=True,
mode='constant').astype('bool_')
# resize the image using bilinear interpolation (order 1)
im_ms = transform.resize(im_ms,(im_pan.shape[0], im_pan.shape[1]),
order=1, preserve_range=True, mode='constant')
# check if -inf or nan values and add to cloud mask
im_inf = np.isin(im_ms[:,:,0], -np.inf)
im_nan = np.isnan(im_ms[:,:,0])
cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
# calculate cloud cover and skip image if too high
cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
if cloud_cover > cloud_thresh:
print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
idx_skipped.append(i)
continue
idx_nocloud.append(i)
# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
# rescale pansharpened RGB for visualisation
im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
# add down-sized bands for NIR and SWIR (since pansharpening is not possible)
im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
# classify image in 4 classes (sand, whitewater, water, other) with NN classifier
im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)
# # manually validate classification
# pt_in = np.array(ginput(n=1, timeout=1000))
# if pt_in[0][1] > nrows/2:
# print('skip ' + str(i) + ' - wrong classification')
# idx_skipped.append(i)
# continue
# if there are no sand pixels, skip the image (maybe later change the detection method with old method)
if sum(sum(im_labels[:,:,0])) == 0 :
print('skip ' + str(i) + ' - no sand')
idx_skipped.append(i)
continue
# extract shorelines (new method)
contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, plot_bool)
plt.figure()
im = np.copy(im_display)
# define colours for plot
colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
for k in range(0,im_labels.shape[2]):
im[im_labels[:,:,k],0] = colours[k,0]
im[im_labels[:,:,k],1] = colours[k,1]
im[im_labels[:,:,k],2] = colours[k,2]
plt.imshow(im)
for k,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
mng = plt.get_current_fig_manager()
mng.window.showMaximized()
plt.tight_layout()
plt.draw()
# manually validate detection
pt_in = np.array(ginput(n=1, timeout=1000))
if pt_in[0][1] > nrows/2:
print('skip ' + str(i) + ' - wrong detection')
idx_skipped.append(i)
continue
# remove contour points that are around clouds (nan values)
for k, contour in enumerate(contours_mwi):
if np.any(np.isnan(contour)):
index_nan = np.where(np.isnan(contour))[0]
contours_mwi[k] = np.delete(contour, index_nan, axis=0) # write back into the list; rebinding 'contour' alone is lost
# convert from pixels to world coordinates
wl_coords = sds.convert_pix2world(contours_mwi, georef)
# convert to output epsg spatial reference
wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)
# remove contours that have a perimeter < min_length_wl as usually they are not shoreline
wl_good = []
for l, wls in enumerate(wl):
coords = [(wls[k,0], wls[k,1]) for k in range(len(wls))]
a = LineString(coords) # shapely LineString structure
if a.length >= min_length_wl:
wl_good.append(wls)
# pre-process points (list of arrays to single array of points)
x_points = np.array([])
y_points = np.array([])
for k in range(len(wl_good)):
x_points = np.append(x_points,wl_good[k][:,0])
y_points = np.append(y_points,wl_good[k][:,1])
wl_good = np.transpose(np.array([x_points,y_points]))
# only select points around Narrabeen beach (refpoints given)
temp = np.zeros((len(wl_good))).astype(bool)
for k in range(len(refpoints)):
temp = np.logical_or(np.linalg.norm(wl_good - refpoints[k,[0,1]], axis=1) < dist_ref, temp)
wl_final = wl_good[temp]
plt.figure()
plt.axis('equal')
plt.plot(wl_final[:,0],wl_final[:,1],'k.')
plt.draw()
# check if an image for that date already exists and keep the best in terms of cloud cover and georeferencing
idx_date = len(satname) + 1 + len(sitename) + 1 # start of the date string in the filename
if file_names_pan[i][idx_date:idx_date+10] in date_acquired_ts:
# find the index of the image that is repeated
idx_samedate = utils.find_indices(date_acquired_ts, lambda e : e == file_names_pan[i][idx_date:idx_date+10])
idx_samedate = idx_samedate[0]
# print('cloud cover ' + str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
# print('acc georef ' + str(acc_georef_sorted[i]) + ' - ' + str(acc_georef_ts[idx_samedate]))
# keep image with less cloud cover or best georeferencing accuracy
if cloud_cover < cloud_cover_ts[idx_samedate] - 0.01:
skip = False
elif acc_georef_sorted[i] < acc_georef_ts[idx_samedate]:
skip = False
else:
skip = True
if skip:
print('skip ' + str(i) + ' - repeated')
idx_skipped.append(i)
continue
else:
del shorelines[idx_samedate]
del t[idx_samedate]
del cloud_cover_ts[idx_samedate]
del date_acquired_ts[idx_samedate]
del acc_georef_ts[idx_samedate]
print('keep ' + str(i) + ' - deleted ' + str(idx_samedate))
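# the keep/skip rule above prefers the image with less cloud cover (with a 1%
# margin) and falls back to georeferencing accuracy; the same rule as a helper:
def keep_new_image(cc_new, cc_old, georef_new, georef_old):
    # True if the new image should replace the stored one
    if cc_new < cc_old - 0.01:
        return True
    return georef_new < georef_old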
# save data
shorelines.append(wl_final)
t.append(timestamps_sorted[i])
cloud_cover_ts.append(cloud_cover)
acc_georef_ts.append(acc_georef_sorted[i])
date_acquired_ts.append(file_names_pan[i][idx_date:idx_date+10])
output = {'t':t, 'shorelines':shorelines, 'cloud_cover':cloud_cover_ts, 'acc_georef':acc_georef_ts}
#with open(os.path.join(filepath, sitename + '_output2' + '.pkl'), 'wb') as f:
# pickle.dump(output, f)
#
#with open(os.path.join(filepath, sitename + '_skipped2' + '.pkl'), 'wb') as f:
# pickle.dump(idx_skipped, f)
#
#with open(os.path.join(filepath, sitename + '_idxnocloud2' + '.pkl'), 'wb') as f:
# pickle.dump(idx_nocloud, f)
# plt.figure()
# plt.axis('equal')
# plt.plot(refpoints[:,0], refpoints[:,1], 'ko')
# plt.plot(all_points[temp,0], all_points[temp,1], 'go')
# plt.plot(all_points[~temp,0], all_points[~temp,1], 'ro')
# plt.draw()
# extract shorelines (old method)
# im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
# wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)
# plt.figure()
# plt.imshow(im_display)
# for i,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
# for i,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linestyle='--', linewidth=1, color='w')
# plt.draw()

@ -88,14 +88,14 @@ for i in range(N):
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
nrow = im_pan.shape[0]
ncol = im_pan.shape[1]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]

@ -85,14 +85,14 @@ for i in range(N):
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
georef = np.array(data.GetGeoTransform())
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_pan = np.stack(bands, 2)[:,:,0]
nrow = im_pan.shape[0]
ncol = im_pan.shape[1]
# read ms image
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
im_ms = np.stack(bands, 2)
# cloud mask
im_qa = im_ms[:,:,5]

@ -0,0 +1,382 @@
# -*- coding: utf-8 -*-
#==========================================================#
# Compare Narrabeen SDS with 3D quadbike surveys
#==========================================================#
# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee
import matplotlib.dates as mdates
import matplotlib.cm as cm
from datetime import datetime, timedelta
import pickle
import pytz
import scipy.io as sio
import scipy.interpolate as interpolate
import statsmodels.api as sm
import skimage.measure as measure
# my functions
import functions.utils as utils
# some settings
np.seterr(all='ignore') # ignore warnings from divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
au_tz = pytz.timezone('Australia/Sydney')
# load quadbike dates and convert from datenum to datetime
filename = os.path.join('data', 'quadbike', 'survey_dates.mat') # avoid bare backslashes in paths
filepath = os.path.join(os.getcwd(), filename)
dates_quad = sio.loadmat(filepath)['dates'] # matrix containing year, month, day
dates_quad = [datetime(int(dates_quad[i,0]), int(dates_quad[i,1]), int(dates_quad[i,2]),
tzinfo=au_tz) for i in range(dates_quad.shape[0])] # cast to int in case loadmat returns floats
# load timestamps from satellite images
satname = 'L8'
sitename = 'NARRA'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_output_new' + '.pkl'), 'rb') as f:
output = pickle.load(f)
dates_l8 = output['t']
# convert to AEST
dates_l8 = [_.astimezone(au_tz) for _ in dates_l8]
# remove duplicates
dates_l8_str = [_.strftime('%Y%m%d') for _ in dates_l8]
dupl = utils.duplicates_dict(dates_l8_str)
idx_remove = []
for k,v in dupl.items():
idx1 = v[0]
idx2 = v[1]
c1 = output['cloud_cover'][idx1]
c2 = output['cloud_cover'][idx2]
g1 = output['acc_georef'][idx1]
g2 = output['acc_georef'][idx2]
if c1 < c2 - 0.01:
idx_remove.append(idx2)
elif g1 < g2 - 0.1:
idx_remove.append(idx2)
else:
idx_remove.append(idx1)
idx_remove = sorted(idx_remove)
idx_all = np.arange(len(output['t'])) # all image indices (was hardcoded to 71 images)
idx_keep = list(np.where(~np.isin(idx_all, idx_remove))[0])
output['t'] = [output['t'][k] for k in idx_keep]
output['shorelines'] = [output['shorelines'][k] for k in idx_keep]
output['cloud_cover'] = [output['cloud_cover'][k] for k in idx_keep]
output['acc_georef'] = [output['acc_georef'][k] for k in idx_keep]
# convert to AEST
dates_l8 = output['t']
dates_l8 = [_.astimezone(au_tz) for _ in dates_l8]
# load wave data (already AEST)
filename = os.path.join('data', 'wave', 'SydneyProcessed.mat')
filepath = os.path.join(os.getcwd(), filename)
wave_data = sio.loadmat(filepath)
idx = utils.find_indices(wave_data['dates'][:,0], lambda e: e >= dates_l8[0].year and e <= dates_l8[-1].year)
hsig = np.array([wave_data['Hsig'][i][0] for i in idx])
wdir = np.array([wave_data['Wdir'][i][0] for i in idx])
dates_wave = [datetime(int(wave_data['dates'][i,0]), int(wave_data['dates'][i,1]),
int(wave_data['dates'][i,2]), int(wave_data['dates'][i,3]),
int(wave_data['dates'][i,4]), int(wave_data['dates'][i,5]),
tzinfo=au_tz) for i in idx]
# load tide data (already AEST)
filename = 'SydTideData.mat'
filepath = os.path.join(os.getcwd(), 'data', 'tide', filename)
tide_data = sio.loadmat(filepath)
idx = utils.find_indices(tide_data['dates'][:,0], lambda e: e >= dates_l8[0].year and e <= dates_l8[-1].year)
tide = np.array([tide_data['tide'][i][0] for i in idx])
dates_tide = [datetime(int(tide_data['dates'][i,0]), int(tide_data['dates'][i,1]),
int(tide_data['dates'][i,2]), int(tide_data['dates'][i,3]),
int(tide_data['dates'][i,4]), int(tide_data['dates'][i,5]),
tzinfo=au_tz) for i in idx]
#%% make a plot of all the dates with wave data
orange = [255/255,140/255,0]
blue = [0,191/255,255/255]
f = plt.figure()
months = mdates.MonthLocator()
month_fmt = mdates.DateFormatter('%b %Y')
days = mdates.DayLocator()
years = [2013,2014,2015,2016]
for k in range(len(years)):
sel_year = years[k]
ax = plt.subplot(4,1,k+1)
idx_year = utils.find_indices(dates_wave, lambda e : e.year == sel_year)
plt.plot([dates_wave[i] for i in idx_year], [hsig[i] for i in idx_year], 'k-', linewidth=0.5)
hsigmax = np.nanmax([hsig[i] for i in idx_year])
cbool = True
for j in range(len(dates_quad)):
if dates_quad[j].year == sel_year:
if cbool:
plt.plot([dates_quad[j], dates_quad[j]], [0, hsigmax], color=orange, label='survey')
cbool = False
else:
plt.plot([dates_quad[j], dates_quad[j]], [0, hsigmax], color=orange)
cbool = True
for j in range(len(dates_l8)):
if dates_l8[j].year == sel_year:
if cbool:
plt.plot([dates_l8[j], dates_l8[j]], [0, hsigmax], color=blue, label='landsat8')
cbool = False
else:
plt.plot([dates_l8[j], dates_l8[j]], [0, hsigmax], color=blue)
if k == 3:
plt.legend()
plt.xlim((datetime(sel_year,1,1, tzinfo=au_tz), datetime(sel_year,12,31, tzinfo=au_tz)))
plt.ylim((0, hsigmax))
plt.ylabel('Hs [m]')
ax.xaxis.set_major_locator(months) # call the setter instead of overwriting it
ax.xaxis.set_major_formatter(month_fmt)
f.subplots_adjust(hspace=0.2)
plt.draw()
#%% calculate difference between dates (quad and sat)
diff_days = [ [(x - _).days for _ in dates_quad] for x in dates_l8]
max_diff = 14
idx_closest = [utils.find_indices(_, lambda e: abs(e) <= max_diff) for _ in diff_days]
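# diff_days is effectively an n_sat x n_quad matrix of day offsets; the same
# matching, vectorised with numpy (a sketch, assuming tz-aware datetimes):
t_sat = np.array([_.timestamp() for _ in dates_l8])
t_quad = np.array([_.timestamp() for _ in dates_quad])
day_matrix = (t_sat[:, None] - t_quad[None, :]) / 86400.0
# satellite images with at least one survey within max_diff days:
# np.where(np.any(np.abs(day_matrix) <= max_diff, axis=1))[0]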
# store the matches in dates_diff, a list of dictionaries
dates_diff = []
cloud_cover = []
for i in range(len(idx_closest)):
if not idx_closest[i]:
continue
elif len(idx_closest[i]) > 1:
idx_best = np.argmin(np.abs([diff_days[i][_] for _ in idx_closest[i]]))
dates_temp = [dates_quad[_] for _ in idx_closest[i]]
days_temp = [diff_days[i][_] for _ in idx_closest[i]]
dates_diff.append({"date sat": dates_l8[i],
"date quad": dates_temp[idx_best],
"days diff": days_temp[idx_best]})
else:
dates_diff.append({"date sat": dates_l8[i],
"date quad": dates_quad[idx_closest[i][0]],
"days diff": diff_days[i][idx_closest[i][0]]
})
# store cloud data
cloud_cover.append(output['cloud_cover'][i])
# store wave data
wave_hsig = []
for i in range(len(dates_diff)):
wave_hsig.append(hsig[np.argmin(np.abs(np.array([(dates_diff[i]['date sat'] - _).total_seconds() for _ in dates_wave])))])
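# the nearest-record search above scans all wave timestamps for every image; a
# faster sketch with binary search (assumption: dates_wave is chronological):
t_wave = np.array([_.timestamp() for _ in dates_wave])
def nearest_wave_idx(t_query):
    j = int(np.clip(np.searchsorted(t_wave, t_query), 1, len(t_wave) - 1))
    # pick whichever neighbour of the insertion point is closer
    return j if abs(t_wave[j] - t_query) < abs(t_wave[j - 1] - t_query) else j - 1
# e.g. hsig[nearest_wave_idx(dates_diff[i]['date sat'].timestamp())]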
# make a plot
plt.figure()
counter = 0
for i in range(len(dates_diff)):
counter = counter + 1
# colour/label the two vertical lines by data source (blue = landsat8, orange = survey)
if dates_diff[i]['date quad'] > dates_diff[i]['date sat']:
date_min = dates_diff[i]['date sat']
date_max = dates_diff[i]['date quad']
color_min, label_min = blue, 'landsat8'
color_max, label_max = orange, 'survey'
else:
date_min = dates_diff[i]['date quad']
date_max = dates_diff[i]['date sat']
color_min, label_min = orange, 'survey'
color_max, label_max = blue, 'landsat8'
idx_t = utils.find_indices(dates_wave, lambda e : e >= date_min and e <= date_max)
hsigmax = np.nanmax([hsig[i] for i in idx_t])
hsigmin = np.nanmin([hsig[i] for i in idx_t])
if counter > 9:
counter = 1
plt.figure()
ax = plt.subplot(3,3,counter)
plt.plot([dates_wave[i] for i in idx_t], [hsig[i] for i in idx_t], 'k-', linewidth=1.5)
plt.plot([date_min, date_min], [0, 4.5], color=color_min, label=label_min)
plt.plot([date_max, date_max], [0, 4.5], color=color_max, label=label_max)
plt.ylabel('Hs [m]')
ax.xaxis.set_major_locator(mdates.DayLocator(tz=au_tz))
ax.xaxis.set_minor_locator(mdates.HourLocator(tz=au_tz))
ax.xaxis.set_major_formatter(mdates.DateFormatter('%d'))
plt.title(dates_diff[i]['date sat'].strftime('%b %Y') + ' (' + str(abs(dates_diff[i]['days diff'])) + ' days)')
plt.draw()
plt.gcf().subplots_adjust(hspace=0.5)
# mean absolute day difference between matched survey and satellite dates
print('mean abs day difference: ' + str(np.mean([np.abs(_['days diff']) for _ in dates_diff])))
#%% Compare shorelines in elevation
dist_buffer = 50 # buffer of points selected for interpolation
# load quadbike .mat files
foldername = os.path.join('data', 'quadbike', 'surveys3D')
folderpath = os.path.join(os.getcwd(), foldername)
filenames = os.listdir(folderpath)
# get the satellite shorelines
sl = output['shorelines']
# get dates from filenames
dates_quad = [datetime(int(_[6:10]), int(_[11:13]), int(_[14:16]), tzinfo= au_tz) for _ in filenames]
zav = []
ztide = []
sl_gt = []
for i in range(len(dates_diff)):
sl_smooth = sl[i]
# select closest 3D survey and load .mat file
idx_closest = np.argmin(np.abs(np.array([(dates_diff[i]['date sat'] - _).days for _ in dates_quad])))
survey3d = sio.loadmat(os.path.join(folderpath, filenames[idx_closest]))
# reshape to a vector
xs = survey3d['x'].reshape(survey3d['x'].shape[0] * survey3d['x'].shape[1])
ys = survey3d['y'].reshape(survey3d['y'].shape[0] * survey3d['y'].shape[1])
zs = survey3d['z'].reshape(survey3d['z'].shape[0] * survey3d['z'].shape[1])
# remove nan values
idx_nan = np.isnan(zs)
xs = xs[~idx_nan]
ys = ys[~idx_nan]
zs = zs[~idx_nan]
# find water level at the time the image was acquired
idx_closest = np.argmin(np.abs(np.array([(dates_diff[i]['date sat'] - _).total_seconds() for _ in dates_tide])))
tide_level = tide[idx_closest]
ztide.append(tide_level)
# find contour corresponding to the water level on 3D surface (if below minimum, add 0.05m increments)
if tide_level < np.nanmin(survey3d['z']):
tide_level = np.nanmin(survey3d['z'])
sl_tide = measure.find_contours(survey3d['z'], tide_level)
sl_tide = sl_tide[np.argmax(np.array([len(_) for _ in sl_tide]))]
count = 0
while len(sl_tide) < 900:
count = count + 1
tide_level = tide_level + 0.05 # fixed 0.05 m increments, so the total added is 0.05*count
sl_tide = measure.find_contours(survey3d['z'], tide_level)
sl_tide = sl_tide[np.argmax(np.array([len(_) for _ in sl_tide]))]
print('added ' + str(0.05*count) + ' m - contour with ' + str(len(sl_tide)) + ' points')
else:
sl_tide = measure.find_contours(survey3d['z'], tide_level)
sl_tide = sl_tide[np.argmax(np.array([len(_) for _ in sl_tide]))]
# remove nans
if np.any(np.isnan(sl_tide)):
index_nan = np.where(np.isnan(sl_tide))[0]
sl_tide = np.delete(sl_tide, index_nan, axis=0)
# get x,y coordinates
xtide = [survey3d['x'][int(np.round(sl_tide[m,0])), int(np.round(sl_tide[m,1]))] for m in range(sl_tide.shape[0])]
ytide = [survey3d['y'][int(np.round(sl_tide[m,0])), int(np.round(sl_tide[m,1]))] for m in range(sl_tide.shape[0])]
sl_gt.append(np.transpose(np.array([np.array(xtide), np.array(ytide)])))
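# find_contours returns fractional (row, col) indices; the lines above snap them
# to the nearest grid node to read x/y off the survey grids. The same lookup as
# a helper (bilinear interpolation of the grids would be more precise):
def contour_to_xy(contour, xgrid, ygrid):
    r = np.round(contour[:, 0]).astype(int)
    c = np.round(contour[:, 1]).astype(int)
    return np.column_stack([xgrid[r, c], ygrid[r, c]])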
# interpolate SDS on 3D surface to get elevation (point by point)
zq = []
for j in range(sl_smooth.shape[0]):
xq = sl_smooth[j,0]
yq = sl_smooth[j,1]
dist_q = np.hypot(xs - xq, ys - yq) # distance from the query point to every survey point
idx_buffer = dist_q <= dist_buffer
if sum(idx_buffer) > 0:
tck = interpolate.bisplrep(xs[idx_buffer], ys[idx_buffer], zs[idx_buffer])
zq.append(interpolate.bisplev(xq, yq, tck))
zq = np.array(zq)
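# per-point bisplrep/bisplev fits a fresh smoothing spline in every 50 m buffer;
# a simpler alternative sketch using linear interpolation on the scattered
# survey points (returns nan outside their convex hull):
def elevation_at(xq, yq, xs, ys, zs):
    return float(interpolate.griddata((xs, ys), zs, (xq, yq), method='linear'))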
plt.figure()
plt.hist(zq, bins=100)
plt.draw()
# plt.figure()
# plt.axis('equal')
# plt.scatter(xs, ys, s=10, c=zs, marker='o', cmap=cm.get_cmap('jet'),
# label='quad data')
# plt.plot(xs[idx_buffer], ys[idx_buffer], 'ko')
# plt.plot(xq,yq,'ro')
# plt.draw()
# store the alongshore median elevation
zav.append(np.median(utils.reject_outliers(zq, m=2)))
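# utils.reject_outliers is not shown in this file; a plausible implementation
# (an assumption, not necessarily the author's) keeps values within m standard
# deviations of the median before the median above is taken:
def reject_outliers_sketch(data, m=2):
    data = np.asarray(data)
    return data[np.abs(data - np.median(data)) < m * np.std(data)]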
# make plot
red = [255/255, 0, 0]
gray = [0.75, 0.75, 0.75]
plt.figure()
plt.subplot(121)
plt.axis('equal')
plt.scatter(xs, ys, s=10, c=zs, marker='o', cmap=cm.get_cmap('jet'),
label='3D survey')
plt.plot(xtide, ytide, '--', color=gray, linewidth=2.5, label='tide level contour')
plt.plot(sl_smooth[:,0], sl_smooth[:,1], '-', color=red, linewidth=2.5, label='SDS')
# plt.plot(sl[i][idx_beach,0], sl[i][idx_beach,1], 'w-', linewidth=2)
plt.xlabel('Eastings [m]')
plt.ylabel('Northings [m]')
plt.title('Shoreline comparison')
plt.colorbar(label='mAHD')
plt.legend()
plt.ylim((6266100, 6267000))
plt.subplot(122)
plt.plot(np.linspace(0,1,len(zq)), zq, 'ko-', markersize=5)
plt.plot([0, 1], [zav[i], zav[i]], 'r-', label='median')
plt.plot([0, 1], [ztide[i], ztide[i]], 'g--', label = 'measured tide')
plt.xlabel('normalised alongshore distance [-]')
plt.ylabel('Elevation [mAHD]')
plt.title('Alongshore SDS elevation')
plt.legend()
mng = plt.get_current_fig_manager()
mng.window.showMaximized()
plt.tight_layout()
plt.draw()
print(i)
#%% Calculate some error statistics
zav = np.array(zav)
ztide = np.array(ztide)
f = plt.figure()
plt.subplot(3,1,1)
plt.bar(np.linspace(1,len(zav),len(zav)), zav-ztide)
plt.ylabel('Error in z [m]')
plt.title('Elevation error')
plt.xticks([])
plt.draw()
plt.subplot(3,1,2)
plt.bar(np.linspace(1,len(zav),len(zav)), wave_hsig, color=orange)
plt.ylabel('Hsig [m]')
plt.xticks([])
plt.draw()
plt.subplot(3,1,3)
plt.bar(np.linspace(1,len(zav),len(zav)), np.array(cloud_cover)*100, color='g')
plt.ylabel('Cloud cover %')
plt.xlabel('comparison #')
plt.grid(False)
plt.grid(axis='y')
f.subplots_adjust(hspace=0)
plt.draw()
# root-mean-square error between SDS elevations and measured tide levels
rmse = np.sqrt(np.mean((zav - ztide)**2))
print('RMSE = ' + str(rmse) + ' m')
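# alongside the RMSE, the mean error (bias) shows whether the SDS elevations
# sit systematically above or below the measured tide; a small sketch:
bias = np.mean(zav - ztide) # > 0 means the SDS sits above the tide level
print('bias = ' + str(bias) + ' m')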
#%% plot to show LOWESS smoothing
#i = 0
#idx_beach = [np.min(np.linalg.norm(sl[i][k,:] - narrabeach, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
#x = sl[i][idx_beach,0]
#y = sl[i][idx_beach,1]
#sl_smooth = lowess(x,y, frac=1./10, it = 10)
#
#plt.figure()
#plt.axis('equal')
#plt.scatter
#plt.plot(x,y,'bo', linewidth=2, label='original SDS')
#plt.plot(sl_smooth[:,1], sl_smooth[:,0], 'ro', linewidth=2, label='smoothed SDS')
#plt.legend()
#plt.xlabel('Eastings [m]')
#plt.ylabel('Northings [m]')
#plt.title('Local weighted scatterplot smoothing (LOWESS)')
#plt.draw()

@ -24,7 +24,7 @@ import skimage.measure as measure
# my functions
import functions.utils as utils
import functions.sds as sds
import functions.sds_old1 as sds
# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
@ -44,7 +44,7 @@ dates_quad = [datetime(dates_quad[i,0], dates_quad[i,1], dates_quad[i,2],
satname = 'L8'
sitename = 'NARRA'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_output2' + '.pkl'), 'rb') as f:
with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'rb') as f:
output = pickle.load(f)
dates_l8 = output['t']
# convert to AEST
@ -195,7 +195,7 @@ filenames = os.listdir(folderpath)
sl = output['shorelines']
# load narrabeen beach points (manually digitized)
with open(os.path.join(os.getcwd(), 'olddata', 'narra_beach' + '.pkl'), 'rb') as f:
with open(os.path.join(os.getcwd(), 'old', 'olddata', 'narra_beach' + '.pkl'), 'rb') as f:
narrabeach = pickle.load(f)
# get dates from filenames