# -*- coding: utf-8 -*-

#==========================================================#
# Extract shorelines from Landsat images
#==========================================================#

# Initial settings
import ee
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import pandas as pd
from datetime import datetime
import pickle
import pdb
import pytz
from pylab import ginput
from osgeo import gdal

# image processing modules
from scipy import ndimage
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure
import skimage.color as color
import skimage.feature as feature

# machine learning modules
from sklearn.cluster import KMeans

# my modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = False
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.5 # threshold for cloud cover
plot_bool = False # if you want the plots
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
min_contour_points = 100 # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
buffer_size = 10 # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 50 # number of pixels in a beach (pixel classification)

# select collection
satname = 'L8'
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA') # Landsat 8 Tier 1 TOA

# location (Narrabeen-Collaroy beach)
polygon = [[[151.3473129272461, -33.69035274454718],
            [151.2820816040039, -33.68206818063878],
            [151.27281188964844, -33.74775138989556],
            [151.3425064086914, -33.75231878701767],
            [151.3473129272461, -33.69035274454718]]]

# dates
start_date = '2013-01-01'
end_date = '2018-12-31'

# filter by location and date
flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon)).filterDate(start_date, end_date)

n_img = flt_col.size().getInfo()
print('Number of images covering the polygon:', n_img)
im_all = flt_col.getInfo().get('features')

i = 0 # first image

# find image in ee database
im = ee.Image(im_all[i].get('id'))

# load image as np.array
im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, polygon, satname, plot_bool)

# rescale intensities
im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)
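
# a minimal sketch of what this rescaling is assumed to do: clip each band at
# the prob_high percentile (computed over cloud-free pixels) and stretch to
# [0, 1], e.g.
#   vmax = np.percentile(band[~im_cloud], prob_high)
#   band = np.clip(band, 0, vmax) / vmax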

# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)

# add down-sized bands for NIR and SWIR (since pansharpening is not possible)
im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
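
# sds.pansharpen presumably does principal component substitution: decompose
# the 30 m RGB bands with PCA (sklearn.decomposition is imported above), swap
# the first component for the histogram-matched 15 m panchromatic band, and
# invert the transform; NIR and SWIR have no pan counterpart, hence the append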

# calculate NDWI
im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], im_cloud, plot_bool)

# edge detection
wl_pix = sds.find_wl_contours(im_ndwi, im_cloud, min_contour_points, plot_bool)
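
# assuming sds.nd_index computes a standard normalized difference, the index
# above is (NIR - G)/(NIR + G) on the pansharpened bands, so water and land
# take opposite signs; find_wl_contours then presumably thresholds it (e.g.
# filters.threshold_otsu on cloud-free pixels) and traces the water line with
# measure.find_contours, discarding contours shorter than min_contour_points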

# convert from pixels to world coordinates
wl_coords = sds.convert_pix2world(wl_pix, crs['crs_15m'])

# convert to output epsg spatial reference
wl = sds.convert_epsg(wl_coords, crs['epsg_code'], output_epsg)

# classify sand pixels
im_sand = sds.classify_sand_unsupervised(im_ms_ps, im_pan, im_cloud, wl_pix, buffer_size, min_beach_size, plot_bool)
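
# the pixel-to-world step is an affine map; assuming crs['crs_15m'] holds a
# GDAL-style geotransform (x0, dx, rx, y0, ry, dy), each (row, col) maps to
#   x = x0 + col*dx + row*rx
#   y = y0 + col*ry + row*dy
# and convert_epsg then reprojects those coordinates from the image's native
# system (crs['epsg_code']) into EPSG:28356 (GDA94 / MGA zone 56)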

#pt_in = np.array(ginput(n=1, timeout=1000))
#if pt_in[0][0] < im_ms_ps.shape[1]/2:
win = np.ones((3,3))

im_features = np.zeros((sum(sum(im_sand)), 20))
im_features[:,[0,1,2,3,4]] = im_ms_ps[im_sand,:] # B G R NIR SWIR
im_features[:,5] = im_pan[im_sand] # Pan
im_features[:,6] = im_ndwi[im_sand] # NDWI
im_features[:,7] = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,2], im_cloud, False)[im_sand] # NDVI
im_features[:,8] = sds.nd_index(im_ms_ps[:,:,0], im_ms_ps[:,:,2], im_cloud, False)[im_sand] # ND Blue - Red

# texture features: local (3x3) standard deviation of each of the 9 images
# above; generic_filter needs an input with the same dimensionality as its
# footprint, so each 2-D image is filtered first and then sampled at the sand
# pixels (filtering the 1-D vector im_features[:,i] with a 3x3 window would fail)
feature_ims = ([im_ms_ps[:,:,k] for k in range(5)] +
               [im_pan, im_ndwi,
                sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,2], im_cloud, False),
                sds.nd_index(im_ms_ps[:,:,0], im_ms_ps[:,:,2], im_cloud, False)])
for i in range(9):
    im_features[:,i+9] = ndimage.generic_filter(feature_ims[i], np.std, footprint=win)[im_sand]

im_grey = color.rgb2grey(im_ms_ps[:,:,[2,1,0]])
plt.figure()
plt.imshow(im_grey, cmap='gray')
plt.draw()

counts, bins = np.histogram(im_grey[~im_cloud], bins=255)
im_grey_d = np.digitize(im_grey, bins=bins) - 1
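
# np.digitize maps each grey value to the index of its histogram bin, so
# im_grey_d holds integer levels in roughly [0, 255]; greycomatrix (used
# below) needs integer inputs with values below its `levels` argument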

# local range filter (max - min over a 3x3 window) as a simple texture measure
varianceMatrix1 = ndimage.generic_filter(im_grey_d, np.max, footprint=np.ones((3,3)))
varianceMatrix2 = ndimage.generic_filter(im_grey_d, np.min, footprint=np.ones((3,3)))
varianceMatrix = varianceMatrix1 - varianceMatrix2
plt.figure()
plt.imshow(varianceMatrix, cmap='gray')
plt.draw()
#%%

# load a test GeoTIFF and stack the bands into a (rows, cols, bands) array
data = gdal.Open('l8_test.tif', gdal.GA_ReadOnly)
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
im = np.stack(bands, 2)

im_test = im[:,:,3]

plt.figure()
plt.imshow(im_test, cmap='gray')
plt.axis('image')
plt.draw()

im_stats = np.zeros((im_test.shape[0], im_test.shape[1], 6))
winsize = 5
prop_names = ('contrast', 'dissimilarity', 'homogeneity', 'energy', 'correlation', 'ASM')
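
# a grey-level co-occurrence matrix (GLCM) counts how often pairs of grey
# levels co-occur at a given pixel offset; greycoprops condenses it into the
# six scalar texture statistics named in prop_names. Note that greycomatrix
# expects an integer image with values below its `levels` argument (256 by
# default), so a raw Landsat band may need rescaling to uint8 first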
for i in range(im_test.shape[0]):
    print(int(np.round(100*i/im_test.shape[0])))
    for j in range(im_test.shape[1]):
        # window needs to fit completely in the image
        if i < 2 or j < 2:
            continue
        if i > (im_test.shape[0] - 3) or j > (im_test.shape[1] - 3):
            continue

        # calculate the GLCM on a 5x5 window (winsize = 5)
        glcm_window = im_test[i-2:i+3, j-2:j+3]
        glcm = feature.greycomatrix(glcm_window, [1], [0], symmetric=True, normed=True)

        # calculate the six properties and replace the center pixel
        for k in range(6):
            im_stats[i,j,k] = feature.greycoprops(glcm, prop_names[k])

plt.figure()
for i in range(6):
    plt.subplot(2,3,i+1)
    plt.imshow(im_stats[:,:,i], cmap='jet')
    plt.axis('image')
    plt.title(prop_names[i])
plt.draw()

# inspect one property (energy) at a single pixel
pixel_loc = [200, 200]
im_stats[pixel_loc[0], pixel_loc[1], 3]
#%%

# repeat on the discretized grey image with a true 3x3 window
im_stats = np.zeros((im_grey_d.shape[0], im_grey_d.shape[1], 6))
for i in range(im_grey_d.shape[0]):
    print(int(np.round(100*i/im_grey_d.shape[0])))
    for j in range(im_grey_d.shape[1]):
        # window needs to fit completely in the image
        if i < 1 or j < 1:
            continue
        if i > (im_grey_d.shape[0] - 2) or j > (im_grey_d.shape[1] - 2):
            continue

        # calculate the GLCM on a 3x3 window (the slice i-1:i+2 gives 3 rows;
        # i-1:i+1 would give only a 2x2 patch); a single distance [1] is used
        # because greycoprops returns one value per (distance, angle) pair and
        # the scalar assignments below expect exactly one
        glcm_window = im_grey_d[i-1:i+2, j-1:j+2]
        glcm = feature.greycomatrix(glcm_window, [1], [0], levels=256, symmetric=True, normed=True)

        # calculate the six properties and replace the center pixel
        im_stats[i,j,0] = feature.greycoprops(glcm, 'contrast')
        im_stats[i,j,1] = feature.greycoprops(glcm, 'dissimilarity')
        im_stats[i,j,2] = feature.greycoprops(glcm, 'homogeneity')
        im_stats[i,j,3] = feature.greycoprops(glcm, 'energy')
        im_stats[i,j,4] = feature.greycoprops(glcm, 'correlation')
        im_stats[i,j,5] = feature.greycoprops(glcm, 'ASM')
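
# this per-pixel Python loop is slow; the cell further below does the same
# GLCM texture computation over sliding windows in parallel with joblib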

plt.figure()
for i in range(6):
    plt.subplot(2,3,i+1)
    plt.imshow(im_stats[:,:,i], cmap='jet')
    plt.axis('image')
plt.draw()
#%%

import time
from multiprocessing import Pool
from itertools import product

def fun(ab):
    # Pool.map passes a single argument to each worker call, so the two
    # operands are zipped into one iterable before mapping
    a, b = ab
    return a*b

N = 10000000
pool = Pool() # defaults to the number of available CPUs
a = np.ones((N))
b = np.ones((N))*2
out = np.zeros((N))

# baseline: element-wise product in a plain Python loop
t = time.time()
for i in range(len(a)):
    out[i] = a[i]*b[i]
elapsed = time.time() - t
print(elapsed)

# parallel version (note: on platforms that spawn rather than fork worker
# processes, this needs an `if __name__ == '__main__':` guard)
chunksize = 20 # this may take some guessing ... take a look at the docs to decide
for ind, res in enumerate(pool.map(fun, zip(a, b), chunksize)):
    out.flat[ind] = res
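
# for an element-wise product like this, the vectorized expression `out = a*b`
# beats both versions: the process pool only pays off when the per-item work
# outweighs the cost of pickling arguments and results between processes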
#%%

from osgeo import gdal, osr
import numpy as np
from scipy.interpolate import RectBivariateSpline
from numpy.lib.stride_tricks import as_strided as ast
import dask.array as da
from joblib import Parallel, delayed, cpu_count
import os
from skimage.feature import greycomatrix, greycoprops

def im_resize(im, Nx, Ny):
    '''
    resize array by bivariate spline interpolation
    '''
    ny, nx = np.shape(im)
    xx = np.linspace(0, nx, Nx)
    yy = np.linspace(0, ny, Ny)

    try:
        im = da.from_array(im, chunks=1000) # dask implementation
    except:
        pass

    newKernel = RectBivariateSpline(np.r_[:ny], np.r_[:nx], im)
    return newKernel(yy, xx)

def p_me(Z):
    '''
    calculate the six greycoprops statistics for one window
    '''
    try:
        # offset of 5 pixels; the window must be wider than the offset for
        # the GLCM to contain any counts
        glcm = greycomatrix(Z, [5], [0], 256, symmetric=True, normed=True)
        cont = greycoprops(glcm, 'contrast')
        diss = greycoprops(glcm, 'dissimilarity')
        homo = greycoprops(glcm, 'homogeneity')
        eng = greycoprops(glcm, 'energy')
        corr = greycoprops(glcm, 'correlation')
        ASM = greycoprops(glcm, 'ASM')
        return (cont, diss, homo, eng, corr, ASM)
    except:
        return (0,0,0,0,0,0)

def norm_shape(shap):
    '''
    Normalize numpy array shapes so they're always expressed as a tuple,
    even for one-dimensional shapes.
    '''
    try:
        i = int(shap)
        return (i,)
    except TypeError:
        # shape was not a number
        pass

    try:
        t = tuple(shap)
        return t
    except TypeError:
        # shape was not iterable
        pass

    raise TypeError('shape must be an int, or a tuple of ints')
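
# e.g. norm_shape(3) -> (3,) and norm_shape((3, 4)) -> (3, 4), so the window
# and step arguments below can be given as ints or tuples interchangeably
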
def sliding_window(a, ws, ss=None, flatten=True):
    '''
    Source: http://www.johnvinyard.com/blog/?p=268#more-268

    Parameters:
        a  - an n-dimensional numpy array
        ws - an int (a is 1D) or tuple (a is 2D or greater) representing the
             size of each dimension of the window
        ss - an int (a is 1D) or tuple (a is 2D or greater) representing the
             amount to slide the window in each dimension. If not specified,
             it defaults to ws.
        flatten - if True, all slices are flattened, otherwise, there is an
             extra dimension for each dimension of the input.

    Returns
        an array containing each n-dimensional window from a, together with
        the shape of the strided array (windows per dimension + window shape)
    '''
    if ss is None:
        # ss was not provided: the windows will not overlap in any direction
        ss = ws
    ws = norm_shape(ws)
    ss = norm_shape(ss)
    # convert ws, ss, and a.shape to numpy arrays
    ws = np.array(ws)
    ss = np.array(ss)
    shap = np.array(a.shape)
    # ensure that ws, ss, and a.shape all have the same number of dimensions
    ls = [len(shap), len(ws), len(ss)]
    if 1 != len(set(ls)):
        raise ValueError(
            'a.shape, ws and ss must all have the same length. They were %s' % str(ls))

    # ensure that ws is smaller than a in every dimension
    if np.any(ws > shap):
        raise ValueError(
            'ws cannot be larger than a in any dimension. '
            'a.shape was %s and ws was %s' % (str(a.shape), str(ws)))

    # how many slices will there be in each dimension?
    newshape = norm_shape(((shap - ws) // ss) + 1)

    # the shape of the strided array will be the number of slices in each
    # dimension plus the shape of the window (tuple addition)
    newshape += norm_shape(ws)

    # the strides tuple will be the array's strides multiplied by step size,
    # plus the array's strides (tuple addition)
    newstrides = norm_shape(np.array(a.strides) * ss) + a.strides
    a = ast(a, shape=newshape, strides=newstrides)
    if not flatten:
        return a
    # collapse strided so that it has one more dimension than the window,
    # i.e. the new array is a flat list of slices
    meat = len(ws) if ws.shape else 0
    firstdim = (np.prod(newshape[:-meat]),) if ws.shape else ()
    dim = firstdim + (newshape[-meat:])
    # remove any dimensions with size 1 (list() is needed in Python 3, where
    # filter returns an iterator that reshape cannot take)
    dim = list(filter(lambda i: i != 1, dim))
    return a.reshape(dim), newshape
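
# example: a 4x4 array with 3x3 windows and unit step yields 2x2 windows:
#   Z, ind = sliding_window(np.arange(16).reshape(4,4), (3,3), (1,1))
#   Z.shape -> (4, 3, 3) and ind -> (2, 2, 3, 3)
# note that as_strided returns views into the original array, not copies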

# Stuff to change
win = 3
meter = str(win/4)
merge = im_grey_d
Z, ind = sliding_window(merge, (win,win), (1,1))

Ny, Nx = np.shape(merge)

# compute the six GLCM properties for every window in parallel
w = Parallel(n_jobs=cpu_count(), verbose=0)(delayed(p_me)(Z[k]) for k in range(len(Z)))
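# delayed(p_me)(Z[k]) just records the (function, args) pair; Parallel then
# dispatches the recorded calls across cpu_count() worker processes and
# returns the results in order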

cont = [a[0] for a in w]
diss = [a[1] for a in w]
homo = [a[2] for a in w]
eng = [a[3] for a in w]
corr = [a[4] for a in w]
ASM = [a[5] for a in w]


# reshape to match the number of windows in each dimension
plt_cont = np.reshape(cont, (ind[0], ind[1]))
plt_diss = np.reshape(diss, (ind[0], ind[1]))
plt_homo = np.reshape(homo, (ind[0], ind[1]))
plt_eng = np.reshape(eng, (ind[0], ind[1]))
plt_corr = np.reshape(corr, (ind[0], ind[1]))
plt_ASM = np.reshape(ASM, (ind[0], ind[1]))
del cont, diss, homo, eng, corr, ASM

# resize the texture grids (windows per dimension) back to the full image
# size and mask pixels with no data
contrast = im_resize(plt_cont, Nx, Ny)
contrast[merge==0] = np.nan
dissimilarity = im_resize(plt_diss, Nx, Ny)
dissimilarity[merge==0] = np.nan
homogeneity = im_resize(plt_homo, Nx, Ny)
homogeneity[merge==0] = np.nan
energy = im_resize(plt_eng, Nx, Ny)
energy[merge==0] = np.nan
correlation = im_resize(plt_corr, Nx, Ny)
correlation[merge==0] = np.nan
ASM = im_resize(plt_ASM, Nx, Ny)
ASM[merge==0] = np.nan

#%%

plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
for i, contour in enumerate(wl_pix):
    plt.plot(contour[:, 1], contour[:, 0], linewidth=2)
plt.axis('image')
plt.title('Detected water lines')
plt.show()

# flatten the image into a (n_pixels, n_bands) matrix for clustering
vec = im_ms_ps.reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1], im_ms_ps.shape[2])
vec_pan = im_pan.reshape(im_pan.shape[0]*im_pan.shape[1])
features = np.zeros((len(vec), 5))
features[:,[0,1,2,3]] = vec[:,[0,1,2,3]]
features[:,4] = vec_pan
vec_mask = im_cloud.reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1])

# create buffer: rasterize the detected water lines onto the image grid
im_buffer = np.zeros((im_ms_ps.shape[0], im_ms_ps.shape[1]))
for i, contour in enumerate(wl_pix):
    indices = [(int(_[0]), int(_[1])) for _ in list(np.round(contour))]
    for j, idx in enumerate(indices):
        im_buffer[idx] = 1

plt.figure()
plt.imshow(im_buffer)
plt.draw()

# dilate the rasterized water line with a disk of radius buffer_size pixels
se = morphology.disk(buffer_size)
im_buffer = morphology.binary_dilation(im_buffer, se)

plt.figure()
plt.imshow(im_buffer)
plt.draw()

# keep only buffer pixels that are cloud-free
vec_buffer = (im_buffer == 1).reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1])
vec_buffer = np.logical_and(vec_buffer, ~vec_mask)

#vec_buffer = np.ravel_multi_index(z,(im_ms_ps.shape[0], im_ms_ps.shape[1]))

# cluster the buffer pixels into 6 spectral classes and map the labels back
# onto the image grid (non-buffer pixels stay NaN)
kmeans = KMeans(n_clusters=6, random_state=0).fit(vec[vec_buffer,:])
labels = kmeans.labels_
labels_full = np.ones((len(vec_mask))) * np.nan
labels_full[vec_buffer] = labels
im_labels = labels_full.reshape(im_ms_ps.shape[0], im_ms_ps.shape[1])
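# KMeans is unsupervised, so the 6 cluster labels carry no fixed meaning from
# run to run (random_state pins them for a given input); the sand cluster is
# therefore identified below by its spectral brightness rather than its label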

plt.figure()
plt.imshow(im_labels)
plt.axis('equal')
plt.draw()

utils.compare_images(im_labels, im_pan)

plt.figure()
for i in range(6):
    plt.plot(kmeans.cluster_centers_[i,:])
plt.draw()

# sand is taken to be the brightest cluster averaged over the blue, green,
# red and SWIR bands (columns 0, 1, 2 and 4 of the cluster centers)
im_sand = im_labels == np.argmax(np.mean(kmeans.cluster_centers_[:,[0,1,2,4]], axis=1))

# clean up the sand mask: drop patches smaller than min_beach_size, then
# dilate slightly
im_sand2 = morphology.remove_small_objects(im_sand, min_size=min_beach_size, connectivity=2)
im_sand3 = morphology.binary_dilation(im_sand2, morphology.disk(1))
plt.figure()
plt.imshow(im_sand3)
plt.draw()

# flag the detected sand pixels in the pansharpened bands (they plot as a
# saturated red in the RGB figure below, since band 2 is displayed as red)
im_ms_ps[im_sand3,0] = 0
im_ms_ps[im_sand3,1] = 0
im_ms_ps[im_sand3,2] = 1

plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
plt.axis('image')
plt.title('Sand classification')
plt.show()

#%%