forked from kilianv/CoastSat_WRL
Compare commits
15 Commits
master...development
Author | SHA1 | Date |
---|---|---|
kvos | c1c1e6aacb | 7 years ago |
kvos | b99c2acaf3 | 7 years ago |
kvos | 220e5557f7 | 7 years ago |
kvos | 0a78e3c539 | 7 years ago |
kvos | d2c344ec99 | 7 years ago |
kvos | 0af37d9c6a | 7 years ago |
kvos | 35f421bfcb | 7 years ago |
kvos | c31ae42fbd | 7 years ago |
kvos | f3f13ec180 | 7 years ago |
kvos | 0e78aace0c | 7 years ago |
kvos | 7ca64f2fa0 | 7 years ago |
Kilian Vos | 39c5bb05e1 | 7 years ago |
kvos | 1093ecfeba | 7 years ago |
kvos | 783cd5d033 | 7 years ago |
Kilian Vos | eecdb485fc | 7 years ago |
@@ -0,0 +1,7 @@
*.pyc
*.mat
*.tif
*.png
*.mp4
*.gif
*.jpg
40 binary files not shown.
@@ -0,0 +1,113 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Download L7 images of a given area between given dates
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee

# other modules
from osgeo import gdal, ogr, osr
from urllib.request import urlretrieve
import zipfile
from datetime import datetime
import pytz
import pickle

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure

# import own modules
import functions.utils as utils

np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
ee.Initialize()


def download_tif(image, polygon, bandsId, filepath):
    """Downloads a .tif image (region and bands) from the EE server and stores it in a temporary file."""
    url = ee.data.makeDownloadUrl(ee.data.getDownloadId({
        'image': image.serialize(),
        'region': polygon,
        'bands': bandsId,
        'filePerBand': 'false',
        'name': 'data',
        }))
    local_zip, headers = urlretrieve(url)
    with zipfile.ZipFile(local_zip) as local_zipfile:
        return local_zipfile.extract('data.tif', filepath)

# select collection
input_col = ee.ImageCollection('LANDSAT/LE07/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.301454, -33.700754],
               [151.311453, -33.702075],
               [151.307237, -33.739761],
               [151.294220, -33.736329],
               [151.301454, -33.700754]]]

# dates
#start_date = '2016-01-01'
#end_date = '2016-12-31'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))  #.filterDate(start_date, end_date)

n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')

satname = 'L7'
sitename = 'NARRA'
suffix = '.tif'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
filepath_pan = os.path.join(filepath, 'pan')
filepath_ms = os.path.join(filepath, 'ms')

all_names_pan = []
all_names_ms = []
timestamps = []
# loop through all images
for i in range(n_img):
    # find each image in the EE database
    im = ee.Image(im_all[i].get('id'))
    im_dic = im.getInfo()
    im_bands = im_dic.get('bands')
    im_date = im_dic['properties']['DATE_ACQUIRED']
    t = im_dic['properties']['system:time_start']
    im_timestamp = datetime.fromtimestamp(t/1000, tz=pytz.utc)
    timestamps.append(im_timestamp)
    im_epsg = int(im_dic['bands'][0]['crs'][5:])

    # delete the 'dimensions' key from the dictionary, otherwise the entire image is extracted
    for j in range(len(im_bands)):
        del im_bands[j]['dimensions']
    pan_band = [im_bands[7]]
    ms_bands = [im_bands[0], im_bands[1], im_bands[2], im_bands[3], im_bands[4], im_bands[9]]

    filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + suffix
    filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + suffix

    print(i)
    # if an image with the same date has already been downloaded, append '_r' to the filenames
    if any(filename_pan in _ for _ in all_names_pan):
        filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + '_r' + suffix
        filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + '_r' + suffix
    all_names_pan.append(filename_pan)

    local_data_pan = download_tif(im, rect_narra, pan_band, filepath_pan)
    os.rename(local_data_pan, os.path.join(filepath_pan, filename_pan))
    local_data_ms = download_tif(im, rect_narra, ms_bands, filepath_ms)
    os.rename(local_data_ms, os.path.join(filepath_ms, filename_ms))

with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'wb') as f:
    pickle.dump(timestamps, f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'wb') as f:
    pickle.dump(im_epsg, f)
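The script above renames the extracted GeoTIFFs into `data/L7/NARRA/pan` and `data/L7/NARRA/ms` but never creates those directories. A minimal setup sketch (an added illustration, not part of the original repository) that creates the expected tree once before running the download:

```python
# One-off setup sketch: create the directory tree the download script expects.
import os

satname, sitename = 'L7', 'NARRA'
for sub in ('pan', 'ms'):
    # exist_ok makes this idempotent, so it is safe to re-run
    os.makedirs(os.path.join(os.getcwd(), 'data', satname, sitename, sub),
                exist_ok=True)
```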
@@ -0,0 +1,177 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Download L8 images of a given area between given dates
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee

# other modules
from osgeo import gdal, ogr, osr
from urllib.request import urlretrieve
import zipfile
from datetime import datetime
import pytz
import pickle

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure

# import own modules
import functions.utils as utils

np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
ee.Initialize()


def download_tif(image, polygon, bandsId, filepath):
    """Downloads a .tif image (region and bands) from the EE server and stores it in a temporary file."""
    url = ee.data.makeDownloadUrl(ee.data.getDownloadId({
        'image': image.serialize(),
        'region': polygon,
        'bands': bandsId,
        'filePerBand': 'false',
        'name': 'data',
        }))
    local_zip, headers = urlretrieve(url)
    with zipfile.ZipFile(local_zip) as local_zipfile:
        return local_zipfile.extract('data.tif', filepath)

# select collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
# location (Narrabeen all)
#polygon = [[[151.3473129272461, -33.69035274454718],
#            [151.2820816040039, -33.68206818063878],
#            [151.27281188964844, -33.74775138989556],
#            [151.3425064086914, -33.75231878701767],
#            [151.3473129272461, -33.69035274454718]]]
# location (Narrabeen-Collaroy beach)
#polygon = [[[151.301454, -33.700754],
#            [151.311453, -33.702075],
#            [151.307237, -33.739761],
#            [151.294220, -33.736329],
#            [151.301454, -33.700754]]]
# location (Oldbar beach)
#polygon = [[[152.664508, -31.896163],
#            [152.665827, -31.897112],
#            [152.631516, -31.924846],
#            [152.629285, -31.923362],
#            [152.664508, -31.896163]]]
# location (Oldbar inlet)
#polygon = [[[152.676283, -31.866784],
#            [152.709174, -31.869993],
#            [152.678229, -31.892082],
#            [152.670366, -31.886360],
#            [152.676283, -31.866784]]]
# location (Sand Engine)
#polygon = [[[4.171742, 52.070455],
#            [4.223708, 52.069576],
#            [4.220808, 52.025293],
#            [4.147749, 52.028861],
#            [4.171742, 52.070455]]]
# location (Tairua)
#polygon = [[[175.852115, -36.985414],
#            [175.872797, -36.985145],
#            [175.873738, -37.000039],
#            [175.853956, -36.998749],
#            [175.852115, -36.985414]]]
# location (Duck)
#polygon = [[[-75.766220, 36.195928],
#            [-75.748282, 36.196401],
#            [-75.738851, 36.173974],
#            [-75.763546, 36.174249],
#            [-75.766220, 36.195928]]]
# location (Broulee Island)
#polygon = [[[150.173557, -35.847138],
#            [150.196164, -35.848064],
#            [150.195143, -35.869967],
#            [150.172779, -35.861760],
#            [150.173557, -35.847138]]]
# location (Rarotonga, Muri lagoon)
polygon = [[[-159.732071, -21.241348],
            [-159.719820, -21.242892],
            [-159.720006, -21.261134],
            [-159.731592, -21.258875],
            [-159.732071, -21.241348]]]

# dates
start_date = '2013-01-01'
end_date = '2019-01-01'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon)).filterDate(start_date, end_date)

n_img = flt_col.size().getInfo()
print('Number of images covering the area:', n_img)
im_all = flt_col.getInfo().get('features')

satname = 'L8'
#sitename = 'NARRA_all'
#sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
#sitename = 'DUCK'
#sitename = 'BROULEE'
sitename = 'MURI'

suffix = '.tif'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
filepath_pan = os.path.join(filepath, 'pan')
filepath_ms = os.path.join(filepath, 'ms')

all_names_pan = []
all_names_ms = []
timestamps = []
acc_georef = []
# loop through all images
for i in range(n_img):
    # find each image in the EE database
    im = ee.Image(im_all[i].get('id'))
    im_dic = im.getInfo()
    im_bands = im_dic.get('bands')
    im_date = im_dic['properties']['DATE_ACQUIRED']
    t = im_dic['properties']['system:time_start']
    im_timestamp = datetime.fromtimestamp(t/1000, tz=pytz.utc)
    timestamps.append(im_timestamp)
    im_epsg = int(im_dic['bands'][0]['crs'][5:])
    try:
        acc_georef.append(im_dic['properties']['GEOMETRIC_RMSE_MODEL'])
    except KeyError:
        # some scenes lack this property; fall back to a nominal value
        acc_georef.append(10)
        print('No geometric rmse model property')

    # delete the 'dimensions' key from the dictionary, otherwise the entire image is extracted
    for j in range(len(im_bands)):
        del im_bands[j]['dimensions']
    pan_band = [im_bands[7]]
    ms_bands = [im_bands[1], im_bands[2], im_bands[3], im_bands[4], im_bands[5], im_bands[11]]

    filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + suffix
    filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + suffix

    print(i)
    # if an image with the same date has already been downloaded, append '_r' to the filenames
    if any(filename_pan in _ for _ in all_names_pan):
        filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + '_r' + suffix
        filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + '_r' + suffix
    all_names_pan.append(filename_pan)

    local_data_pan = download_tif(im, polygon, pan_band, filepath_pan)
    os.rename(local_data_pan, os.path.join(filepath_pan, filename_pan))
    local_data_ms = download_tif(im, polygon, ms_bands, filepath_ms)
    os.rename(local_data_ms, os.path.join(filepath_ms, filename_ms))

with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'wb') as f:
    pickle.dump(timestamps, f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'wb') as f:
    pickle.dump(im_epsg, f)
with open(os.path.join(filepath, sitename + '_accuracy_georef' + '.pkl'), 'wb') as f:
    pickle.dump(acc_georef, f)
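A quick sanity-check sketch (an added, hypothetical snippet): reload the metadata pickles written above and confirm that one timestamp and one georeferencing accuracy were stored per scene.

```python
# Sanity-check sketch: reload the metadata pickles written by the script above.
import os
import pickle

filepath = os.path.join(os.getcwd(), 'data', 'L8', 'MURI')
with open(os.path.join(filepath, 'MURI_timestamps.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
with open(os.path.join(filepath, 'MURI_accuracy_georef.pkl'), 'rb') as f:
    acc_georef = pickle.load(f)
assert len(timestamps) == len(acc_georef)
print(len(timestamps), 'scenes; worst geometric RMSE:', max(acc_georef))
```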
@@ -0,0 +1,106 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Draw reference points on satellite image
#==========================================================#

# Preamble
import os
import ee
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import pandas as pd
from datetime import datetime
import pickle
import pdb
import pytz
from pylab import ginput

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure

# my functions
import functions.utils as utils
import functions.sds as sds

np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
ee.Initialize()

# collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')

# location (Narrabeen-Collaroy beach)
#polygon = [[[151.301454, -33.700754],
#            [151.311453, -33.702075],
#            [151.307237, -33.739761],
#            [151.294220, -33.736329],
#            [151.301454, -33.700754]]]
# location (Oldbar shoreline)
#polygon = [[[152.664508, -31.896163],
#            [152.665827, -31.897112],
#            [152.631516, -31.924846],
#            [152.629285, -31.923362],
#            [152.664508, -31.896163]]]
# location (Oldbar inlet)
polygon = [[[152.676283, -31.866784],
            [152.709174, -31.869993],
            [152.678229, -31.892082],
            [152.670366, -31.886360],
            [152.676283, -31.866784]]]

# dates
start_date = '2017-01-30'
end_date = '2017-02-02'
#start_date = '2017-01-30'
#end_date = '2018-02-02'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon)).filterDate(start_date, end_date)

n_img = flt_col.size().getInfo()
print('Number of images covering the area:', n_img)
im_all = flt_col.getInfo().get('features')

satname = 'L8'
#sitename = 'NARRA'
sitename = 'OLDBAR_inlet'

# parameters
plot_bool = False  # if you want the plots
prob_high = 99.9  # upper probability to clip and rescale pixel intensity
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356  # GDA94 / MGA Zone 56
cloud_threshold = 0.8

# find image in ee database
im = ee.Image(im_all[0].get('id'))
# load image as np.array
im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, polygon, satname, plot_bool)

# rescale intensities
im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)

# pansharpen rgb image
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)

plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
plt.show()

# digitise up to 50 reference points on the displayed image
pts = ginput(n=50, timeout=1000, show_clicks=True)
points = np.array(pts)
plt.plot(points[:,0], points[:,1], 'ko')
plt.show()

# ginput returns (x, y) = (column, row), so swap to (row, column) before converting
pts_coords = sds.convert_pix2world(points[:,[1,0]], crs['crs_15m'])
pts = sds.convert_epsg(pts_coords, crs['epsg_code'], output_epsg)

filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_refpoints2.pkl'), 'wb') as f:
    pickle.dump(pts, f)
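The pixel-to-world conversion used above (`sds.convert_pix2world`) is an affine transformation built from the 6-element GDAL geotransform. A single-point sketch of the same mapping (an added illustration; `crs_vec` stands in for the `crs['crs_15m']` vector):

```python
# Affine pixel-to-world mapping, as applied by sds.convert_pix2world.
# crs_vec = [X_ul_corner, Xscale, Xshear, Y_ul_corner, Yshear, Yscale]
def pix2world_single(row, col, crs_vec):
    x = crs_vec[0] + crs_vec[1] * col + crs_vec[2] * row
    y = crs_vec[3] + crs_vec[4] * col + crs_vec[5] * row
    return x, y
```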
@@ -0,0 +1,322 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Extract shorelines from Landsat images
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import ee
import pdb

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology

# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.3  # threshold for cloud cover
plot_bool = False  # if you want the plots
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356  # GDA94 / MGA Zone 56
buffer_size = 10  # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 30  # number of pixels in a beach (pixel classification)

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
sitename = 'NARRA'
#sitename = 'OLDBAR'

# load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
with open(os.path.join(filepath, sitename + '_accuracy_georef' + '.pkl'), 'rb') as f:
    acc_georef = pickle.load(f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
    input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints' + '.pkl'), 'rb') as f:
    refpoints = pickle.load(f)
# sort timestamps and georef accuracy (downloaded images are sorted by date in the directory)
timestamps_sorted = sorted(timestamps)
idx_sorted = sorted(range(len(timestamps)), key=timestamps.__getitem__)
acc_georef_sorted = [acc_georef[j] for j in idx_sorted]

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
cloud_cover_ts = []
date_acquired_ts = []
acc_georef_ts = []
idx_skipped = []
idx_nocloud = []
t = []
shorelines = []
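# Note (added): the pan/ms filenames follow '<satname>_<sitename>_<YYYY-MM-DD>_pan.tif',
# so for satname='L8' and sitename='NARRA' the slice [9:19] used below extracts the
# 10-character acquisition date; the longer slice expression in the loop computes
# the same offsets from len(satname) and len(sitename).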
#%%
for i in [20]:  # range(N)
    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    nrows = im_pan.shape[0]
    ncols = im_pan.shape[1]

    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)

    # cloud mask
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check for -inf or nan values and add them to the cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)

    # calculate cloud cover and skip the image if it is too high
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skip ' + str(i) + ' - cloudy (' + str(cloud_cover) + ')')
        idx_skipped.append(i)
        continue
    idx_nocloud.append(i)

    # check if an image for that date already exists and keep the best one
    # in terms of cloud cover and georeferencing accuracy
    if file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] in date_acquired_ts:

        # find the index of the repeated image
        idx_samedate = utils.find_indices(date_acquired_ts, lambda e: e == file_names_pan[i][9:19])
        idx_samedate = idx_samedate[0]
        print('cloud cover ' + str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
        print('acc georef ' + str(acc_georef_sorted[i]) + ' - ' + str(acc_georef_ts[idx_samedate]))

        # keep the image with less cloud cover or better georeferencing accuracy
        if cloud_cover < cloud_cover_ts[idx_samedate] - 0.01:
            skip = False
        elif acc_georef_sorted[i] < acc_georef_ts[idx_samedate]:
            skip = False
        else:
            skip = True

        if skip:
            print('skip ' + str(i) + ' - repeated')
            idx_skipped.append(i)
            continue
        else:
            del shorelines[idx_samedate]
            del t[idx_samedate]
            del cloud_cover_ts[idx_samedate]
            del date_acquired_ts[idx_samedate]
            del acc_georef_ts[idx_samedate]
            print('keep ' + str(i) + ' - deleted ' + str(idx_samedate))

    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # rescale pansharpened RGB for visualisation
    im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)

    # classify image in 4 classes (sand, whitewater, water, other) with NN classifier
    im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, True)

    t.append(timestamps_sorted[i])
    cloud_cover_ts.append(cloud_cover)
    acc_georef_ts.append(acc_georef_sorted[i])
    date_acquired_ts.append(file_names_pan[i][9:19])

    # labels
    im_sand = im_classif == 1
    im_swash = im_classif == 2
    im_water = im_classif == 3
    vec_sand = im_sand.reshape(ncols*nrows)
    vec_water = im_water.reshape(ncols*nrows)
    vec_swash = im_swash.reshape(ncols*nrows)

    # calculate indices and stack them into a vector
    im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
    im_ndmwi = sds.nd_index(im_ms_ps[:,:,4], im_ms_ps[:,:,1], cloud_mask, plot_bool)
    im_nir = im_ms_ps[:,:,3]
    im_swir = im_ms_ps[:,:,4]
    im_ind = np.stack((im_ndwi, im_ndmwi), axis=-1)
    vec_ind = im_ind.reshape(nrows*ncols, 2)

    # remove noise and only keep the sand belonging to large beaches
    morphology.remove_small_objects(im_sand, min_size=50, connectivity=2, in_place=True)
    # create a buffer around the beach
    buffer_size = 7
    se = morphology.disk(buffer_size)
    im_buffer = morphology.binary_dilation(im_sand, se)
    vec_buffer = im_buffer.reshape(nrows*ncols)

    # display the buffer
    im = np.copy(im_display)
    im[~im_buffer,0] = 1
    im[~im_buffer,1] = 1
    im[~im_buffer,2] = 1

    im2 = np.copy(im_ndmwi)
    im2[~im_buffer] = np.nan

    plt.figure()
    ax1 = plt.subplot(121)
    plt.imshow(im)
    plt.axis('off')
    plt.title('RGB')
    ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
    plt.imshow(im2, cmap='seismic')
    plt.colorbar()
    plt.axis('off')
    plt.title('Water Index')
    plt.tight_layout()
    plt.draw()

    # select the water/sand/swash pixels that are within the buffer
    int_water = vec_ind[np.logical_and(vec_buffer, vec_water), :]
    int_sand = vec_ind[np.logical_and(vec_buffer, vec_sand), :]
    int_swash = vec_ind[np.logical_and(vec_buffer, vec_swash), :]

    # append sand and water and compute Otsu thresholds on both indices
    int_all = np.append(int_water, int_sand, axis=0)
    t_ndwi = filters.threshold_otsu(int_all[:,0])
    t_ndmwi = filters.threshold_otsu(int_all[:,1])

    fig, ax = plt.subplots(2, 1, sharex=True)
    vals = ax[0].hist(int_water[:,0], bins=100, label='water')
    ax[0].hist(int_sand[:,0], bins=100, alpha=0.5, label='sand')
    ax[0].hist(int_swash[:,0], bins=100, alpha=0.5, label='swash')
    ax[0].plot([t_ndwi, t_ndwi], [0, np.max(vals[0])], 'r-')
    ax[0].legend()
    ax[0].set_title('Water Index NIR-G')
    vals = ax[1].hist(int_water[:,1], bins=100, label='water')
    ax[1].hist(int_sand[:,1], bins=100, alpha=0.5, label='sand')
    ax[1].hist(int_swash[:,1], bins=100, alpha=0.5, label='swash')
    ax[1].plot([t_ndmwi, t_ndmwi], [0, np.max(vals[0])], 'r-')
    ax[1].legend()
    ax[1].set_title('Modified Water Index SWIR-G')
    plt.draw()

    im_ndwi_buffer = np.copy(im_ndwi)
    im_ndwi_buffer[~im_buffer] = np.nan
    contours1 = measure.find_contours(im_ndwi_buffer, t_ndwi)

    im_ndmwi_buffer = np.copy(im_ndmwi)
    im_ndmwi_buffer[~im_buffer] = np.nan
    contours2 = measure.find_contours(im_ndmwi_buffer, t_ndmwi)

    plt.figure()
    ax1 = plt.subplot(1, 3, 1)
    im = np.copy(im_display)
    # define colours for the plot
    colours = np.array([[1, 128/255, 0/255], [204/255, 1, 1], [0, 0, 204/255]])
    for k in range(0, im_labels.shape[2]):
        im[im_labels[:,:,k], 0] = colours[k, 0]
        im[im_labels[:,:,k], 1] = colours[k, 1]
        im[im_labels[:,:,k], 2] = colours[k, 2]
    plt.imshow(im)
    # note: plain iteration avoids clobbering the outer loop variable i
    for contour in contours2:
        plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
    plt.tight_layout()
    plt.grid(False)
    plt.draw()

    plt.subplot(1, 3, 2, sharex=ax1, sharey=ax1)
    plt.imshow(im_display)
    for contour in contours2:
        plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
    plt.tight_layout()
    plt.grid(False)
    plt.draw()

    plt.subplot(1, 3, 3, sharex=ax1, sharey=ax1)
    plt.imshow(im_ndmwi, cmap='seismic')
    plt.colorbar()
    for contour in contours2:
        plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
    plt.tight_layout()
    plt.grid(False)
    plt.draw()

    # plot of all the indices
    plt.figure()
    ax1 = plt.subplot(1, 5, 1)
    plt.imshow(im_display)
    plt.xticks([])
    plt.yticks([])
    plt.axis('off')
    plt.title('RGB')
    plt.subplot(1, 5, 2, sharex=ax1, sharey=ax1)
    plt.imshow(im_ndwi, cmap='seismic')
    plt.xticks([])
    plt.yticks([])
    plt.axis('off')
    plt.title('NDWI')
    plt.subplot(1, 5, 3, sharex=ax1, sharey=ax1)
    plt.imshow(im_ndmwi, cmap='seismic')
    plt.xticks([])
    plt.yticks([])
    plt.axis('off')
    plt.title('NDMWI')
    plt.subplot(1, 5, 4, sharex=ax1, sharey=ax1)
    plt.imshow(im_nir, cmap='seismic')
    plt.xticks([])
    plt.yticks([])
    plt.axis('off')
    plt.title('NIR')
    plt.subplot(1, 5, 5, sharex=ax1, sharey=ax1)
    plt.imshow(im_swir, cmap='seismic')
    plt.xticks([])
    plt.yticks([])
    plt.axis('off')
    plt.title('SWIR')
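The shoreline detection above hinges on `filters.threshold_otsu` separating the water and sand index values. A tiny, self-contained illustration on synthetic bimodal data (an added example, not from the repository):

```python
# Otsu's threshold on a synthetic two-mode sample of water-index values.
import numpy as np
import skimage.filters as filters

np.random.seed(0)
water = np.random.normal(-0.4, 0.1, 1000)  # one population of index values
sand = np.random.normal(0.3, 0.1, 1000)    # the other population
t = filters.threshold_otsu(np.concatenate([water, sand]))
print('Otsu threshold:', t)  # lands between the two modes
```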
Binary file not shown.
@@ -0,0 +1,685 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 1 11:20:35 2018

@author: z5030440
"""

"""This module contains the functions needed for satellite derived shoreline (SDS) extraction"""

# Initial settings
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee

# other modules
from osgeo import gdal, ogr, osr
import tempfile
from urllib.request import urlretrieve
import zipfile

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from sklearn.cluster import KMeans

# import own modules
from functions.utils import *

# download from EE server

def download_tif(image, polygon, bandsId):
    """Downloads a .tif image (region and bands) from the EE server and stores it in a temporary file."""
    url = ee.data.makeDownloadUrl(ee.data.getDownloadId({
        'image': image.serialize(),
        'region': polygon,
        'bands': bandsId,
        'filePerBand': 'false',
        'name': 'data',
        }))
    local_zip, headers = urlretrieve(url)
    with zipfile.ZipFile(local_zip) as local_zipfile:
        return local_zipfile.extract('data.tif', tempfile.mkdtemp())

def load_image(image, polygon, bandsId):
    """
    Loads an ee.Image() as a np.array. The ee.Image() is retrieved from the EE database.
    The geographic area and bands to select can be specified.

    KV WRL 2018

    Arguments:
    -----------
    image: ee.Image()
        image object from the EE database
    polygon: list
        coordinates of the points creating a polygon. Each point is a list with 2 values
    bandsId: list
        bands to select; each band is a dictionary in the list containing the following keys:
        crs, crs_transform, data_type and id. NOTE: you have to remove the key 'dimensions',
        otherwise the entire image is retrieved.

    Returns:
    -----------
    image_array : np.ndarray
        An array containing the image (2D if one band, otherwise 3D)
    georef : np.ndarray
        6 element vector containing the crs parameters
        [X_ul_corner Xscale Xshear Y_ul_corner Yshear Yscale]
    """

    local_tif_filename = download_tif(image, polygon, bandsId)
    dataset = gdal.Open(local_tif_filename, gdal.GA_ReadOnly)
    georef = np.array(dataset.GetGeoTransform())
    bands = [dataset.GetRasterBand(i + 1).ReadAsArray() for i in range(dataset.RasterCount)]
    return np.stack(bands, 2), georef

def create_cloud_mask(im_qa, satname, plot_bool):
    """
    Creates a cloud mask from the image containing the QA band information.

    KV WRL 2018

    Arguments:
    -----------
    im_qa: np.ndarray
        Image containing the QA band
    satname: string
        short name for the satellite (L8, L7, S2)
    plot_bool: boolean
        True if a plot is wanted

    Returns:
    -----------
    cloud_mask : np.ndarray of booleans
        A boolean array with True where clouds are present
    """

    # convert QA bits
    if satname == 'L8':
        cloud_values = [2800, 2804, 2808, 2812, 6896, 6900, 6904, 6908]
    elif satname == 'L7':
        cloud_values = [752, 756, 760, 764]

    cloud_mask = np.isin(im_qa, cloud_values)
    # remove isolated cloud pixels (there are some in the swash zone and they cause problems)
    if sum(sum(cloud_mask)) > 0:
        morphology.remove_small_objects(cloud_mask, min_size=10, connectivity=1, in_place=True)

    if plot_bool:
        plt.figure()
        plt.imshow(cloud_mask, cmap='gray')
        plt.draw()

    #cloud_shadow_values = [2976, 2980, 2984, 2988, 3008, 3012, 3016, 3020]
    #cloud_shadow_mask = np.isin(im_qa, cloud_shadow_values)

    return cloud_mask

def read_eeimage(im, polygon, sat_name, plot_bool):
    """
    Reads an ee.Image() object and returns the panchromatic band, multispectral bands (B, G, R, NIR, SWIR)
    and a cloud mask. All outputs are at 15 m resolution (bilinear interpolation for the multispectral bands).

    KV WRL 2018

    Arguments:
    -----------
    im: ee.Image()
        Image to read from the Google Earth Engine database
    plot_bool: boolean
        True if a plot is wanted

    Returns:
    -----------
    im_pan: np.ndarray (2D)
        The panchromatic band (15 m)
    im_ms: np.ndarray (3D)
        The multispectral bands interpolated at 15 m
    im_cloud: np.ndarray (2D)
        The cloud mask at 15 m
    crs_params: list
        EPSG code and affine transformation parameters
    """

    im_dic = im.getInfo()
    # save metadata
    im_meta = im_dic.get('properties')
    meta = {'timestamp': im_meta['system:time_start'],
            'date_acquired': im_meta['DATE_ACQUIRED'],
            'geom_rmse_model': im_meta['GEOMETRIC_RMSE_MODEL'],
            'gcp_model': im_meta['GROUND_CONTROL_POINTS_MODEL'],
            'quality': im_meta['IMAGE_QUALITY_OLI'],
            'sun_azimuth': im_meta['SUN_AZIMUTH'],
            'sun_elevation': im_meta['SUN_ELEVATION']}

    im_bands = im_dic.get('bands')

    # delete the 'dimensions' key from the dictionary, otherwise the entire image is extracted
    for i in range(len(im_bands)):
        del im_bands[i]['dimensions']

    # load panchromatic band
    pan_band = [im_bands[7]]
    im_pan, crs_pan = load_image(im, polygon, pan_band)
    im_pan = im_pan[:,:,0]

    # load the multispectral bands (B2,B3,B4,B5,B6) = (blue,green,red,nir,swir1)
    ms_bands = [im_bands[1], im_bands[2], im_bands[3], im_bands[4], im_bands[5]]
    im_ms_30m, crs_ms = load_image(im, polygon, ms_bands)

    # create cloud mask
    qa_band = [im_bands[11]]
    im_qa, crs_qa = load_image(im, polygon, qa_band)
    im_qa = im_qa[:,:,0]
    im_cloud = create_cloud_mask(im_qa, sat_name, plot_bool)
    im_cloud = transform.resize(im_cloud, (im_pan.shape[0], im_pan.shape[1]),
                                order=0, preserve_range=True, mode='constant').astype('bool_')

    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms_30m, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check for -inf values (meaning out of image) and add them to the cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    im_cloud = np.logical_or(np.logical_or(im_cloud, im_inf), im_nan)

    # get the crs parameters for the image at 15 m and 30 m resolution
    crs = {'crs_15m': crs_pan, 'crs_30m': crs_ms, 'epsg_code': int(pan_band[0]['crs'][5:])}

    if plot_bool:

        # if there are -inf values in the image, set them to 0 before plotting
        if sum(sum(np.isin(im_ms_30m[:,:,0], -np.inf).astype(int))) > 0:
            idx = np.isin(im_ms_30m[:,:,0], -np.inf)
            im_ms_30m[idx,0] = 0; im_ms_30m[idx,1] = 0; im_ms_30m[idx,2] = 0
            im_ms_30m[idx,3] = 0; im_ms_30m[idx,4] = 0

        plt.figure()

        plt.subplot(221)
        plt.imshow(im_pan, cmap='gray')
        plt.title('PANCHROMATIC')

        plt.subplot(222)
        plt.imshow(im_ms_30m[:,:,[2,1,0]])
        plt.title('RGB')

        plt.subplot(223)
        plt.imshow(im_ms_30m[:,:,3], cmap='gray')
        plt.title('NIR')

        plt.subplot(224)
        plt.imshow(im_ms_30m[:,:,4], cmap='gray')
        plt.title('SWIR')

        plt.show()

    return im_pan, im_ms, im_cloud, crs, meta


def rescale_image_intensity(im, cloud_mask, prob_high, plot_bool):
    """
    Rescales the intensity of an image (multispectral or single band) by applying
    a cloud mask and clipping at the prob_high upper percentile. This function allows
    the contrast of an image to be stretched.

    KV WRL 2018

    Arguments:
    -----------
    im: np.ndarray
        Image to rescale, can be 3D (multispectral) or 2D (single band)
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    prob_high: float
        probability of exceedance used to calculate the upper percentile
    plot_bool: boolean
        True if a plot is wanted

    Returns:
    -----------
    im_adj: np.ndarray
        The rescaled image
    """
    prc_low = 0  # lower percentile
    vec_mask = cloud_mask.reshape(im.shape[0] * im.shape[1])

    if plot_bool:
        plt.figure()

    if len(im.shape) > 2:
        vec = im.reshape(im.shape[0] * im.shape[1], im.shape[2])
        vec_adj = np.ones((len(vec_mask), im.shape[2])) * np.nan

        for i in range(im.shape[2]):
            prc_high = np.percentile(vec[~vec_mask, i], prob_high)
            vec_rescaled = exposure.rescale_intensity(vec[~vec_mask, i], in_range=(prc_low, prc_high))
            vec_adj[~vec_mask, i] = vec_rescaled

            if plot_bool:
                plt.subplot(int(np.floor(im.shape[2]/2) + 1), int(np.floor(im.shape[2]/2)), i+1)
                plt.hist(vec[~vec_mask, i], bins=200, label='original')
                plt.hist(vec_rescaled, bins=200, alpha=0.5, label='rescaled')
                plt.legend()
                plt.title('Band' + str(i+1))
                plt.show()

        im_adj = vec_adj.reshape(im.shape[0], im.shape[1], im.shape[2])

        if plot_bool:
            plt.figure()
            ax1 = plt.subplot(121)
            plt.imshow(im[:,:,[2,1,0]])
            plt.axis('off')
            plt.title('Original')
            ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
            plt.imshow(im_adj[:,:,[2,1,0]])
            plt.axis('off')
            plt.title('Rescaled')
            plt.show()

    else:
        vec = im.reshape(im.shape[0] * im.shape[1])
        vec_adj = np.ones(len(vec_mask)) * np.nan
        prc_high = np.percentile(vec[~vec_mask], prob_high)
        vec_rescaled = exposure.rescale_intensity(vec[~vec_mask], in_range=(prc_low, prc_high))
        vec_adj[~vec_mask] = vec_rescaled

        if plot_bool:
            plt.hist(vec[~vec_mask], bins=200, label='original')
            plt.hist(vec_rescaled, bins=200, alpha=0.5, label='rescaled')
            plt.legend()
            plt.title('Single band')
            plt.show()

        im_adj = vec_adj.reshape(im.shape[0], im.shape[1])

        if plot_bool:
            plt.figure()
            ax1 = plt.subplot(121)
            plt.imshow(im, cmap='gray')
            plt.axis('off')
            plt.title('Original')
            ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
            plt.imshow(im_adj, cmap='gray')
            plt.axis('off')
            plt.title('Rescaled')
            plt.show()

    return im_adj

def hist_match(source, template):
    """
    Adjusts the pixel values of a grayscale image such that its histogram
    matches that of a target image.

    Arguments:
    -----------
    source: np.ndarray
        Image to transform; the histogram is computed over the flattened array
    template: np.ndarray
        Template image; can have different dimensions to source

    Returns:
    -----------
    matched: np.ndarray
        The transformed output image
    """

    oldshape = source.shape
    source = source.ravel()
    template = template.ravel()

    # get the set of unique pixel values and their corresponding indices and counts
    s_values, bin_idx, s_counts = np.unique(source, return_inverse=True,
                                            return_counts=True)
    t_values, t_counts = np.unique(template, return_counts=True)

    # take the cumsum of the counts and normalise by the number of pixels to
    # get the empirical cumulative distribution functions for the source and
    # template images (maps pixel value --> quantile)
    s_quantiles = np.cumsum(s_counts).astype(np.float64)
    s_quantiles /= s_quantiles[-1]
    t_quantiles = np.cumsum(t_counts).astype(np.float64)
    t_quantiles /= t_quantiles[-1]

    # interpolate linearly to find the pixel values in the template image
    # that correspond most closely to the quantiles in the source image
    interp_t_values = np.interp(s_quantiles, t_quantiles, t_values)

    return interp_t_values[bin_idx].reshape(oldshape)

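# Example usage (added sketch): hist_match is used by pansharpen below to make
# the panchromatic band's distribution comparable to the first principal
# component before substitution, e.g.
#   matched = hist_match(vec_pan, vec_pcs[:, 0])
# maps vec_pan through its own empirical CDF onto the value range of
# vec_pcs[:, 0], preserving the ordering of the source pixels.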
def pansharpen(im_ms, im_pan, cloud_mask, plot_bool):
    """
    Pansharpens a multispectral image (3D), using the panchromatic band (2D)
    and a cloud mask.

    KV WRL 2018

    Arguments:
    -----------
    im_ms: np.ndarray
        Multispectral image to pansharpen (3D)
    im_pan: np.ndarray
        Panchromatic band (2D)
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    plot_bool: boolean
        True if a plot is wanted

    Returns:
    -----------
    im_ms_ps: np.ndarray
        Pansharpened multispectral image (3D)
    """

    # reshape image into vector and apply cloud mask
    vec = im_ms.reshape(im_ms.shape[0] * im_ms.shape[1], im_ms.shape[2])
    vec_mask = cloud_mask.reshape(im_ms.shape[0] * im_ms.shape[1])
    vec = vec[~vec_mask, :]
    # apply PCA to RGB bands
    pca = decomposition.PCA()
    vec_pcs = pca.fit_transform(vec)
    # replace the 1st PC with the pan band (after matching histograms)
    vec_pan = im_pan.reshape(im_pan.shape[0] * im_pan.shape[1])
    vec_pan = vec_pan[~vec_mask]
    vec_pcs[:,0] = hist_match(vec_pan, vec_pcs[:,0])
    vec_ms_ps = pca.inverse_transform(vec_pcs)

    # normalise between 0 and 1
    for i in range(vec_pcs.shape[1]):
        vec_ms_ps[:,i] = np.divide(vec_ms_ps[:,i] - np.min(vec_ms_ps[:,i]),
                                   np.max(vec_ms_ps[:,i]) - np.min(vec_ms_ps[:,i]))
    # reshape vector into image
    vec_ms_ps_full = np.ones((len(vec_mask), im_ms.shape[2])) * np.nan
    vec_ms_ps_full[~vec_mask,:] = vec_ms_ps
    im_ms_ps = vec_ms_ps_full.reshape(im_ms.shape[0], im_ms.shape[1], im_ms.shape[2])

    if plot_bool:
        plt.figure()
        ax1 = plt.subplot(121)
        plt.imshow(im_ms[:,:,[2,1,0]])
        plt.axis('off')
        plt.title('Original')
        ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
        plt.imshow(im_ms_ps[:,:,[2,1,0]])
        plt.axis('off')
        plt.title('Pansharpened')
        plt.show()

    return im_ms_ps

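# Note (added): nd_index below computes nd = (b1 - b2) / (b1 + b2), bounded in
# [-1, 1]; the calling scripts pass (NIR, G) for the NDWI and (SWIR, G) for the
# modified water index, with cloud pixels masked as NaN.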
def nd_index(im1, im2, cloud_mask, plot_bool):
    """
    Computes the normalised difference index of 2 images (2D), given a cloud mask (2D).

    KV WRL 2018

    Arguments:
    -----------
    im1, im2: np.ndarray
        Images (2D) with which to calculate the ND index
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    plot_bool: boolean
        True if a plot is wanted

    Returns:
    -----------
    im_nd: np.ndarray
        Image (2D) containing the ND index
    """
    vec_mask = cloud_mask.reshape(im1.shape[0] * im1.shape[1])
    vec_nd = np.ones(len(vec_mask)) * np.nan
    vec1 = im1.reshape(im1.shape[0] * im1.shape[1])
    vec2 = im2.reshape(im2.shape[0] * im2.shape[1])
    temp = np.divide(vec1[~vec_mask] - vec2[~vec_mask],
                     vec1[~vec_mask] + vec2[~vec_mask])
    vec_nd[~vec_mask] = temp
    im_nd = vec_nd.reshape(im1.shape[0], im1.shape[1])

    if plot_bool:
        plt.figure()
        plt.imshow(im_nd, cmap='seismic')
        plt.colorbar()
        plt.title('Normalised index')
        plt.show()

    return im_nd

def find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool):
    """
    Finds the water line contours on a water index image (2D), given a cloud mask (2D),
    by thresholding the index with Otsu's method and contouring at the threshold.

    KV WRL 2018

    Arguments:
    -----------
    im_ndwi: np.ndarray
        Image (2D) with the NDWI (water index)
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    min_contour_points: int
        minimum number of points in each contour line
    plot_bool: boolean
        True if a plot is wanted

    Returns:
    -----------
    contours_wl: list of np.arrays
        contains the (row, column) coordinates of the contour lines
    """

    # reshape image to vector
    vec_ndwi = im_ndwi.reshape(im_ndwi.shape[0] * im_ndwi.shape[1])
    vec_mask = cloud_mask.reshape(cloud_mask.shape[0] * cloud_mask.shape[1])
    vec = vec_ndwi[~vec_mask]
    # apply Otsu's threshold
    t_otsu = filters.threshold_otsu(vec)
    # use the marching squares algorithm to detect contours on the ndwi image
    contours = measure.find_contours(im_ndwi, t_otsu)
    # filter water lines
    contours_wl = []
    for contour in contours:
        # remove contour points that are around clouds (nan values)
        if np.any(np.isnan(contour)):
            index_nan = np.where(np.isnan(contour))[0]
            contour = np.delete(contour, index_nan, axis=0)
        # remove contours that have only a few points (less than min_contour_points)
        if contour.shape[0] > min_contour_points:
            contours_wl.append(contour)

    if plot_bool:
        # plot Otsu's histogram segmentation
        plt.figure()
        vals = plt.hist(vec, bins=200)
        plt.plot([t_otsu, t_otsu], [0, np.max(vals[0])], 'r-', label='Otsu threshold')
        plt.legend()
        plt.show()

        # plot the water line contours on top of the water index
        plt.figure()
        plt.imshow(im_ndwi, cmap='seismic')
        plt.colorbar()
        for contour in contours_wl:
            plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
        plt.axis('image')
        plt.title('Detected water lines')
        plt.show()

    return contours_wl

def convert_pix2world(points, crs_vec):
    """
    Converts pixel coordinates (row, column) to world projected coordinates
    by performing an affine transformation.

    KV WRL 2018

    Arguments:
    -----------
    points: np.ndarray or list of np.ndarray
        array with 2 columns (rows first and columns second)
    crs_vec: np.ndarray
        vector of 6 elements [Xtr, Xscale, Xshear, Ytr, Yshear, Yscale]

    Returns:
    -----------
    points_converted: np.ndarray or list of np.ndarray
        converted coordinates, first column with X and second column with Y
    """

    # make affine transformation matrix
    aff_mat = np.array([[crs_vec[1], crs_vec[2], crs_vec[0]],
                        [crs_vec[4], crs_vec[5], crs_vec[3]],
                        [0, 0, 1]])
    # create affine transformation
    tform = transform.AffineTransform(aff_mat)

    if type(points) is list:
        points_converted = []
        # iterate over the list
        for arr in points:
            tmp = arr[:,[1,0]]
            points_converted.append(tform(tmp))

    elif type(points) is np.ndarray:
        tmp = points[:,[1,0]]
        points_converted = tform(tmp)

    else:
        raise TypeError('invalid input type')

    return points_converted

def convert_epsg(points, epsg_in, epsg_out):
    """
    Converts from one spatial reference to another using the epsg codes.

    KV WRL 2018

    Arguments:
    -----------
    points: np.ndarray or list of np.ndarray
        array with 2 columns (rows first and columns second)
    epsg_in: int
        epsg code of the spatial reference in which the input is
    epsg_out: int
        epsg code of the spatial reference in which the output will be

    Returns:
    -----------
    points_converted: np.ndarray or list of np.ndarray
        converted coordinates
    """

    # define input and output spatial references
    inSpatialRef = osr.SpatialReference()
    inSpatialRef.ImportFromEPSG(epsg_in)
    outSpatialRef = osr.SpatialReference()
    outSpatialRef.ImportFromEPSG(epsg_out)
    # create a coordinates transform
    coordTransform = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
    # transform points
    if type(points) is list:
        points_converted = []
        # iterate over the list
        for arr in points:
            points_converted.append(np.array(coordTransform.TransformPoints(arr)))

    elif type(points) is np.ndarray:
        points_converted = np.array(coordTransform.TransformPoints(points))

    else:
        raise TypeError('invalid input type')

    return points_converted

def classify_sand_unsupervised(im_ms_ps, im_pan, cloud_mask, wl_pix, buffer_size, min_beach_size, plot_bool):
    """
    Classifies sand pixels using an unsupervised algorithm (KMeans).
    Set buffer_size to False to run the clustering on the entire image
    rather than on a buffer around the water line.

    KV WRL 2018

    Arguments:
    -----------
    im_ms_ps: np.ndarray
        Pansharpened RGB + downsampled NIR and SWIR
    im_pan: np.ndarray
        Panchromatic band
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    wl_pix: list of np.ndarray
        list of arrays containing the pixel coordinates of the water line
    buffer_size: int or False
        radius of the disk used to create a buffer around the water line;
        when False, the entire image is considered for kmeans
    min_beach_size: int
        minimum number of connected pixels belonging to a single beach
    plot_bool: boolean
        True if a plot is wanted

    Returns:
    -----------
    im_sand: np.ndarray
        2D binary image containing True where sand pixels are located
    """
    # reshape the 2D images into vectors
    vec_ms_ps = im_ms_ps.reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1], im_ms_ps.shape[2])
    vec_pan = im_pan.reshape(im_pan.shape[0] * im_pan.shape[1])
    vec_mask = cloud_mask.reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1])
    # add B,G,R,NIR and pan bands to the vector of features
    vec_features = np.zeros((vec_ms_ps.shape[0], 5))
    vec_features[:,[0,1,2,3]] = vec_ms_ps[:,[0,1,2,3]]
    vec_features[:,4] = vec_pan

    if buffer_size:
        # create a binary image with ones where the detected water lines are
        im_buffer = np.zeros((im_ms_ps.shape[0], im_ms_ps.shape[1]))
        for i, contour in enumerate(wl_pix):
            indices = [(int(_[0]), int(_[1])) for _ in list(np.round(contour))]
            for j, idx in enumerate(indices):
                im_buffer[idx] = 1
        # perform a dilation on the binary image
        se = morphology.disk(buffer_size)
        im_buffer = morphology.binary_dilation(im_buffer, se)
        vec_buffer = (im_buffer == 1).reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1])
    else:
        vec_buffer = np.ones((vec_pan.shape[0]))
    # add the cloud mask to the buffer
    vec_buffer = np.logical_and(vec_buffer, ~vec_mask)
    # perform kmeans (6 clusters)
    kmeans = KMeans(n_clusters=6, random_state=0).fit(vec_features[vec_buffer,:])
    labels = np.ones((len(vec_mask))) * np.nan
    labels[vec_buffer] = kmeans.labels_
    im_labels = labels.reshape(im_ms_ps.shape[0], im_ms_ps.shape[1])
    # find the class with maximum reflectance in the B,G,R,Pan bands (assumed to be sand)
    im_sand = im_labels == np.argmax(np.mean(kmeans.cluster_centers_[:,[0,1,2,4]], axis=1))
    im_sand = morphology.remove_small_objects(im_sand, min_size=min_beach_size, connectivity=2)
    # im_sand = morphology.binary_dilation(im_sand, morphology.disk(1))

    if plot_bool:
        im = np.copy(im_ms_ps)
        im[im_sand,0] = 0
        im[im_sand,1] = 0
        im[im_sand,2] = 1
        plt.figure()
        plt.imshow(im[:,:,[2,1,0]])
        plt.axis('image')
        plt.title('Sand classification')
        plt.show()

    return im_sand
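A usage sketch for the coordinate helper defined above (an added, hypothetical example; the axis order expected for EPSG:4326 input depends on the installed GDAL version, so treat the exact ordering as an assumption):

```python
# Hypothetical usage of convert_epsg: WGS84 lon/lat to GDA94 / MGA Zone 56.
import numpy as np
import functions.sds as sds

pts = np.array([[151.30, -33.70]])  # lon, lat near Narrabeen (GDAL < 3 order)
pts_mga = sds.convert_epsg(pts, 4326, 28356)
print(pts_mga)  # TransformPoints returns (x, y, z) triples in metres
```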
@ -0,0 +1,883 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Created on Thu Mar 1 11:20:35 2018
|
||||
|
||||
@author: z5030440
|
||||
"""
|
||||
|
||||
"""This script contains the functions needed for satellite derived shoreline (SDS) extraction"""
|
||||
|
||||
# Initial settings
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
import pdb
|
||||
import ee
|
||||
|
||||
# other modules
|
||||
from osgeo import gdal, ogr, osr
|
||||
import tempfile
|
||||
from urllib.request import urlretrieve
|
||||
import zipfile
|
||||
|
||||
# image processing modules
|
||||
import skimage.filters as filters
|
||||
import skimage.exposure as exposure
|
||||
import skimage.transform as transform
|
||||
import sklearn.decomposition as decomposition
|
||||
import skimage.measure as measure
|
||||
import skimage.morphology as morphology
|
||||
|
||||
# machine learning modules
|
||||
from sklearn.cluster import KMeans
|
||||
from sklearn.neural_network import MLPClassifier
|
||||
from sklearn.externals import joblib
|
||||
|
||||
|
||||
# import own modules
|
||||
from functions.utils import *
|
||||
|
||||
|
||||
# Download from ee server function
|
||||
|
||||
def download_tif(image, polygon, bandsId):
|
||||
"""downloads tif image (region and bands) from the ee server and stores it in a temp file"""
|
||||
url = ee.data.makeDownloadUrl(ee.data.getDownloadId({
|
||||
'image': image.serialize(),
|
||||
'region': polygon,
|
||||
'bands': bandsId,
|
||||
'filePerBand': 'false',
|
||||
'name': 'data',
|
||||
}))
|
||||
local_zip, headers = urlretrieve(url)
|
||||
with zipfile.ZipFile(local_zip) as local_zipfile:
|
||||
return local_zipfile.extract('data.tif', tempfile.mkdtemp())
|
||||
|
||||
def load_image(image, polygon, bandsId):
|
||||
"""
|
||||
Loads an ee.Image() as a np.array. e.Image() is retrieved from the EE database.
|
||||
The geographic area and bands to select can be specified
|
||||
|
||||
KV WRL 2018
|
||||
|
||||
Arguments:
|
||||
-----------
|
||||
image: ee.Image()
|
||||
image objec from the EE database
|
||||
polygon: list
|
||||
coordinates of the points creating a polygon. Each point is a list with 2 values
|
||||
bandsId: list
|
||||
bands to select, each band is a dictionnary in the list containing the following keys:
|
||||
crs, crs_transform, data_type and id. NOTE: you have to remove the key dimensions, otherwise
|
||||
the entire image is retrieved.
|
||||
|
||||
Returns:
|
||||
-----------
|
||||
image_array : np.ndarray
|
||||
An array containing the image (2D if one band, otherwise 3D)
|
||||
georef : np.ndarray
|
||||
6 element vector containing the crs_parameters
|
||||
[X_ul_corner Xscale Xshear Y_ul_corner Yshear Yscale]
|
||||
"""
|
||||
|
||||
local_tif_filename = download_tif(image, polygon, bandsId)
|
||||
dataset = gdal.Open(local_tif_filename, gdal.GA_ReadOnly)
|
||||
georef = np.array(dataset.GetGeoTransform())
|
||||
bands = [dataset.GetRasterBand(i + 1).ReadAsArray() for i in range(dataset.RasterCount)]
|
||||
return np.stack(bands, 2), georef
|

def create_cloud_mask(im_qa, satname, plot_bool):
    """
    Creates a cloud mask from the image containing the QA band information

    KV WRL 2018

    Arguments:
    -----------
    im_qa: np.ndarray
        Image containing the QA band
    satname: string
        short name for the satellite (L8, L7, S2)
    plot_bool: boolean
        True if plot is wanted

    Returns:
    -----------
    cloud_mask : np.ndarray of booleans
        A boolean array with True where clouds are present
    """

    # convert QA bits
    if satname == 'L8':
        cloud_values = [2800, 2804, 2808, 2812, 6896, 6900, 6904, 6908]
    elif satname == 'L7':
        cloud_values = [752, 756, 760, 764]

    cloud_mask = np.isin(im_qa, cloud_values)
    # remove isolated cloud pixels (there are some in the swash and they cause problems)
    if sum(sum(cloud_mask)) > 0:
        morphology.remove_small_objects(cloud_mask, min_size=10, connectivity=1, in_place=True)

    if plot_bool:
        plt.figure()
        plt.imshow(cloud_mask, cmap='gray')
        plt.draw()

    #cloud_shadow_values = [2976, 2980, 2984, 2988, 3008, 3012, 3016, 3020]
    #cloud_shadow_mask = np.isin(im_qa, cloud_shadow_values)

    return cloud_mask
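
# --- illustrative sketch (not part of the original module) ---
# cloud_values above are Landsat Collection 1 BQA codes for cloud pixels.
# Note the min_size=10 filter: isolated cloud pixels are dropped, e.g.
# im_qa = np.full((20, 20), 2720)   # 2720 is assumed here to mean clear sky
# im_qa[5:9, 5:9] = 2800            # a 16-pixel cloud patch survives the filter
# im_qa[0, 0] = 2800                # a single isolated pixel is removed
# mask = create_cloud_mask(im_qa, 'L8', False)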

def read_eeimage(im, polygon, sat_name, plot_bool):
    """
    Reads an ee.Image() object and returns the panchromatic band, multispectral bands (B, G, R, NIR, SWIR)
    and a cloud mask. All outputs are at 15m resolution (bilinear interpolation for the multispectral bands)

    KV WRL 2018

    Arguments:
    -----------
    im: ee.Image()
        Image to read from the Google Earth Engine database
    polygon: list
        coordinates of the points creating a polygon. Each point is a list with 2 values
    sat_name: string
        short name for the satellite (L8, L7, S2)
    plot_bool: boolean
        True if plot is wanted

    Returns:
    -----------
    im_pan: np.ndarray (2D)
        The panchromatic band (15m)
    im_ms: np.ndarray (3D)
        The multispectral bands interpolated at 15m
    im_cloud: np.ndarray (2D)
        The cloud mask at 15m
    crs_params: list
        EPSG code and affine transformation parameters
    """

    im_dic = im.getInfo()
    # save metadata
    im_meta = im_dic.get('properties')
    meta = {'timestamp':im_meta['system:time_start'],
            'date_acquired':im_meta['DATE_ACQUIRED'],
            'geom_rmse_model':im_meta['GEOMETRIC_RMSE_MODEL'],
            'gcp_model':im_meta['GROUND_CONTROL_POINTS_MODEL'],
            'quality':im_meta['IMAGE_QUALITY_OLI'],
            'sun_azimuth':im_meta['SUN_AZIMUTH'],
            'sun_elevation':im_meta['SUN_ELEVATION']}

    im_bands = im_dic.get('bands')

    # delete dimensions key from dictionary, otherwise the entire image is extracted
    for i in range(len(im_bands)): del im_bands[i]['dimensions']

    # load panchromatic band
    pan_band = [im_bands[7]]
    im_pan, crs_pan = load_image(im, polygon, pan_band)
    im_pan = im_pan[:,:,0]

    # load the multispectral bands (B2,B3,B4,B5,B6) = (blue,green,red,nir,swir1)
    ms_bands = [im_bands[1], im_bands[2], im_bands[3], im_bands[4], im_bands[5]]
    im_ms_30m, crs_ms = load_image(im, polygon, ms_bands)

    # create cloud mask
    qa_band = [im_bands[11]]
    im_qa, crs_qa = load_image(im, polygon, qa_band)
    im_qa = im_qa[:,:,0]
    im_cloud = create_cloud_mask(im_qa, sat_name, plot_bool)
    im_cloud = transform.resize(im_cloud, (im_pan.shape[0], im_pan.shape[1]),
                                order=0, preserve_range=True, mode='constant').astype('bool_')

    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms_30m,(im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check if -inf values (means out of image) and add to cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    im_cloud = np.logical_or(np.logical_or(im_cloud, im_inf), im_nan)

    # get the crs parameters for the image at 15m and 30m resolution
    crs = {'crs_15m':crs_pan, 'crs_30m':crs_ms, 'epsg_code':int(pan_band[0]['crs'][5:])}

    if plot_bool:

        # if there are -inf in the image, set them to 0 before plotting
        if sum(sum(np.isin(im_ms_30m[:,:,0], -np.inf).astype(int))) > 0:
            idx = np.isin(im_ms_30m[:,:,0], -np.inf)
            im_ms_30m[idx,0] = 0; im_ms_30m[idx,1] = 0; im_ms_30m[idx,2] = 0;
            im_ms_30m[idx,3] = 0; im_ms_30m[idx,4] = 0

        plt.figure()

        plt.subplot(221)
        plt.imshow(im_pan, cmap='gray')
        plt.title('PANCHROMATIC')

        plt.subplot(222)
        plt.imshow(im_ms_30m[:,:,[2,1,0]])
        plt.title('RGB')

        plt.subplot(223)
        plt.imshow(im_ms_30m[:,:,3], cmap='gray')
        plt.title('NIR')

        plt.subplot(224)
        plt.imshow(im_ms_30m[:,:,4], cmap='gray')
        plt.title('SWIR')

        plt.show()

    return im_pan, im_ms, im_cloud, crs, meta
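
# --- illustrative usage (a minimal sketch, assuming the L8 TOA collection used elsewhere
# in this repository; the collection id and polygon are placeholders) ---
# col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA').filterBounds(ee.Geometry.Polygon(polygon))
# im = ee.Image(col.first())
# im_pan, im_ms, im_cloud, crs, meta = read_eeimage(im, polygon, 'L8', False)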

def rescale_image_intensity(im, cloud_mask, prob_high, plot_bool):
    """
    Rescales the intensity of an image (multispectral or single band) by applying
    a cloud mask and clipping the prob_high upper percentile. This function allows
    the contrast of an image to be stretched.

    KV WRL 2018

    Arguments:
    -----------
    im: np.ndarray
        Image to rescale, can be 3D (multispectral) or 2D (single band)
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    prob_high: float
        probability of exceedance used to calculate the upper percentile
    plot_bool: boolean
        True if plot is wanted

    Returns:
    -----------
    im_adj: np.ndarray
        The rescaled image
    """
    prc_low = 0 # lower percentile
    vec_mask = cloud_mask.reshape(im.shape[0] * im.shape[1])

    if plot_bool:
        plt.figure()

    if len(im.shape) > 2:
        vec = im.reshape(im.shape[0] * im.shape[1], im.shape[2])
        vec_adj = np.ones((len(vec_mask), im.shape[2])) * np.nan

        for i in range(im.shape[2]):
            prc_high = np.percentile(vec[~vec_mask, i], prob_high)
            vec_rescaled = exposure.rescale_intensity(vec[~vec_mask, i], in_range=(prc_low, prc_high))
            vec_adj[~vec_mask,i] = vec_rescaled

            if plot_bool:
                plt.subplot(np.floor(im.shape[2]/2) + 1, np.floor(im.shape[2]/2), i+1)
                plt.hist(vec[~vec_mask, i], bins=200, label='original')
                plt.hist(vec_rescaled, bins=200, alpha=0.5, label='rescaled')
                plt.legend()
                plt.title('Band' + str(i+1))
                plt.show()

        im_adj = vec_adj.reshape(im.shape[0], im.shape[1], im.shape[2])

        if plot_bool:
            plt.figure()
            ax1 = plt.subplot(121)
            plt.imshow(im[:,:,[2,1,0]])
            plt.axis('off')
            plt.title('Original')
            ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
            plt.imshow(im_adj[:,:,[2,1,0]])
            plt.axis('off')
            plt.title('Rescaled')
            plt.show()

    else:
        vec = im.reshape(im.shape[0] * im.shape[1])
        vec_adj = np.ones(len(vec_mask)) * np.nan
        prc_high = np.percentile(vec[~vec_mask], prob_high)
        vec_rescaled = exposure.rescale_intensity(vec[~vec_mask], in_range=(prc_low, prc_high))
        vec_adj[~vec_mask] = vec_rescaled

        if plot_bool:
            plt.hist(vec[~vec_mask], bins=200, label='original')
            plt.hist(vec_rescaled, bins=200, alpha=0.5, label='rescaled')
            plt.legend()
            plt.title('Single band')
            plt.show()

        im_adj = vec_adj.reshape(im.shape[0], im.shape[1])

        if plot_bool:
            plt.figure()
            ax1 = plt.subplot(121)
            plt.imshow(im, cmap='gray')
            plt.axis('off')
            plt.title('Original')
            ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
            plt.imshow(im_adj, cmap='gray')
            plt.axis('off')
            plt.title('Rescaled')
            plt.show()

    return im_adj
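
# --- illustrative usage (sketch): stretch a pansharpened RGB composite for display,
# clipping the top 0.1% of intensities, exactly as done by the plotting code below ---
# im_display = rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 99.9, False)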

def hist_match(source, template):
    """
    Adjust the pixel values of a grayscale image such that its histogram
    matches that of a target image

    Arguments:
    -----------
    source: np.ndarray
        Image to transform; the histogram is computed over the flattened array
    template: np.ndarray
        Template image; can have different dimensions to source

    Returns:
    -----------
    matched: np.ndarray
        The transformed output image
    """

    oldshape = source.shape
    source = source.ravel()
    template = template.ravel()

    # get the set of unique pixel values and their corresponding indices and counts
    s_values, bin_idx, s_counts = np.unique(source, return_inverse=True,
                                            return_counts=True)
    t_values, t_counts = np.unique(template, return_counts=True)

    # take the cumsum of the counts and normalize by the number of pixels to
    # get the empirical cumulative distribution functions for the source and
    # template images (maps pixel value --> quantile)
    s_quantiles = np.cumsum(s_counts).astype(np.float64)
    s_quantiles /= s_quantiles[-1]
    t_quantiles = np.cumsum(t_counts).astype(np.float64)
    t_quantiles /= t_quantiles[-1]

    # interpolate linearly to find the pixel values in the template image
    # that correspond most closely to the quantiles in the source image
    interp_t_values = np.interp(s_quantiles, t_quantiles, t_values)

    return interp_t_values[bin_idx].reshape(oldshape)
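
# --- worked example (sketch): quantile mapping on tiny arrays ---
# source = np.array([0., 0., 1., 2.])        # empirical CDF: 0 -> 0.5, 1 -> 0.75, 2 -> 1.0
# template = np.array([10., 20., 30., 40.])  # empirical CDF: 0.25, 0.5, 0.75, 1.0
# hist_match(source, template)
# each source value is replaced by the template value at the same quantile,
# so 0 -> 20, 1 -> 30, 2 -> 40 and the result is [20., 20., 30., 40.]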

def pansharpen(im_ms, im_pan, cloud_mask, plot_bool):
    """
    Pansharpens a multispectral image (3D), using the panchromatic band (2D)
    and a cloud mask

    KV WRL 2018

    Arguments:
    -----------
    im_ms: np.ndarray
        Multispectral image to pansharpen (3D)
    im_pan: np.ndarray
        Panchromatic band (2D)
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    plot_bool: boolean
        True if plot is wanted

    Returns:
    -----------
    im_ms_ps: np.ndarray
        Pansharpened multispectral image (3D)
    """

    # reshape image into vector and apply cloud mask
    vec = im_ms.reshape(im_ms.shape[0] * im_ms.shape[1], im_ms.shape[2])
    vec_mask = cloud_mask.reshape(im_ms.shape[0] * im_ms.shape[1])
    vec = vec[~vec_mask, :]
    # apply PCA to RGB bands
    pca = decomposition.PCA()
    vec_pcs = pca.fit_transform(vec)
    # replace 1st PC with pan band (after matching histograms)
    vec_pan = im_pan.reshape(im_pan.shape[0] * im_pan.shape[1])
    vec_pan = vec_pan[~vec_mask]
    vec_pcs[:,0] = hist_match(vec_pan, vec_pcs[:,0])
    vec_ms_ps = pca.inverse_transform(vec_pcs)

    # reshape vector into image
    vec_ms_ps_full = np.ones((len(vec_mask), im_ms.shape[2])) * np.nan
    vec_ms_ps_full[~vec_mask,:] = vec_ms_ps
    im_ms_ps = vec_ms_ps_full.reshape(im_ms.shape[0], im_ms.shape[1], im_ms.shape[2])

    if plot_bool:
        plt.figure()
        ax1 = plt.subplot(121)
        plt.imshow(rescale_image_intensity(im_ms[:,:,[2,1,0]], cloud_mask, 99.9, False))
        plt.axis('off')
        plt.title('Original')
        ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
        plt.imshow(rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 99.9, False))
        plt.axis('off')
        plt.title('Pansharpened')
        plt.show()

    return im_ms_ps
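
# --- illustrative usage (sketch), mirroring the driver scripts later in this diff:
# only the RGB bands are pansharpened, then the down-sized NIR/SWIR are appended ---
# im_ms_ps = pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, False)
# im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)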

def nd_index(im1, im2, cloud_mask, plot_bool):
    """
    Computes normalised difference index on 2 images (2D), given a cloud mask (2D)

    KV WRL 2018

    Arguments:
    -----------
    im1, im2: np.ndarray
        Images (2D) with which to calculate the ND index
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    plot_bool: boolean
        True if plot is wanted

    Returns:
    -----------
    im_nd: np.ndarray
        Image (2D) containing the ND index
    """
    vec_mask = cloud_mask.reshape(im1.shape[0] * im1.shape[1])
    vec_nd = np.ones(len(vec_mask)) * np.nan
    vec1 = im1.reshape(im1.shape[0] * im1.shape[1])
    vec2 = im2.reshape(im2.shape[0] * im2.shape[1])
    temp = np.divide(vec1[~vec_mask] - vec2[~vec_mask],
                     vec1[~vec_mask] + vec2[~vec_mask])
    vec_nd[~vec_mask] = temp
    im_nd = vec_nd.reshape(im1.shape[0], im1.shape[1])

    if plot_bool:
        plt.figure()
        plt.imshow(im_nd, cmap='seismic')
        plt.colorbar()
        plt.title('Normalised index')
        plt.show()

    return im_nd
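
# --- illustrative usage (sketch): with the band order (B,G,R,NIR,SWIR) of im_ms_ps,
# the water index used below is ND(NIR, G) and the modified water index is ND(SWIR, G) ---
# im_ndwi = nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, False)
# im_mndwi = nd_index(im_ms_ps[:,:,4], im_ms_ps[:,:,1], cloud_mask, False)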

def find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool):
    """
    Finds the water line by thresholding the Normalized Difference Water Index
    and applying the Marching Squares Algorithm

    KV WRL 2018

    Arguments:
    -----------
    im_ndwi: np.ndarray
        Image (2D) with the NDWI (water index)
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    min_contour_points: int
        minimum number of points in each contour line
    plot_bool: boolean
        True if plot is wanted

    Returns:
    -----------
    contours_wl: list of np.arrays
        contains the (row,column) coordinates of the contour lines
    """

    # reshape image to vector
    vec_ndwi = im_ndwi.reshape(im_ndwi.shape[0] * im_ndwi.shape[1])
    vec_mask = cloud_mask.reshape(cloud_mask.shape[0] * cloud_mask.shape[1])
    vec = vec_ndwi[~vec_mask]
    # apply Otsu's threshold
    t_otsu = filters.threshold_otsu(vec)
    # use Marching Squares algorithm to detect contours on the ndwi image
    contours = measure.find_contours(im_ndwi, t_otsu)
    # filter water lines
    contours_wl = []
    for i, contour in enumerate(contours):
        # remove contour points that are around clouds (nan values)
        if np.any(np.isnan(contour)):
            index_nan = np.where(np.isnan(contour))[0]
            contour = np.delete(contour, index_nan, axis=0)
        # remove contours that have fewer points than min_contour_points
        if contour.shape[0] > min_contour_points:
            contours_wl.append(contour)

    if plot_bool:
        # plot Otsu's histogram segmentation
        plt.figure()
        vals = plt.hist(vec, bins=200)
        plt.plot([t_otsu, t_otsu],[0, np.max(vals[0])], 'r-', label='Otsu threshold')
        plt.legend()
        plt.show()

        # plot the water line contours on top of water index
        plt.figure()
        plt.imshow(im_ndwi, cmap='seismic')
        plt.colorbar()
        for i,contour in enumerate(contours_wl): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
        plt.axis('image')
        plt.title('Detected water lines')
        plt.show()

    return contours_wl
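
# --- illustrative pipeline (sketch), as used by the driver scripts later in this diff:
# threshold the NDWI, extract contours, then convert them to world coordinates ---
# im_ndwi = nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, False)
# wl_pix = find_wl_contours(im_ndwi, cloud_mask, min_contour_points=100, plot_bool=False)
# wl_world = convert_pix2world(wl_pix, georef)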

def convert_pix2world(points, crs_vec):
    """
    Converts pixel coordinates (row,columns) to world projected coordinates
    performing an affine transformation

    KV WRL 2018

    Arguments:
    -----------
    points: np.ndarray or list of np.ndarray
        array with 2 columns (rows first and columns second)
    crs_vec: np.ndarray
        vector of 6 elements [Xtr, Xscale, Xshear, Ytr, Yshear, Yscale]

    Returns:
    -----------
    points_converted: np.ndarray or list of np.ndarray
        converted coordinates, first column with X and second column with Y
    """

    # make affine transformation matrix
    aff_mat = np.array([[crs_vec[1], crs_vec[2], crs_vec[0]],
                        [crs_vec[4], crs_vec[5], crs_vec[3]],
                        [0, 0, 1]])
    # create affine transformation
    tform = transform.AffineTransform(aff_mat)

    if type(points) is list:
        points_converted = []
        # iterate over the list
        for i, arr in enumerate(points):
            tmp = arr[:,[1,0]]
            points_converted.append(tform(tmp))

    elif type(points) is np.ndarray:
        tmp = points[:,[1,0]]
        points_converted = tform(tmp)

    else:
        raise ValueError('invalid input type')

    return points_converted
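
# --- the affine mapping in explicit form (sketch): a pixel (row, col) maps to world
# coordinates via the GDAL geotransform [Xtr, Xscale, Xshear, Ytr, Yshear, Yscale]:
#   X = Xtr + Xscale*col + Xshear*row
#   Y = Ytr + Yshear*col + Yscale*row
# which is why the columns are swapped (points[:,[1,0]]) before applying tform.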

def convert_epsg(points, epsg_in, epsg_out):
    """
    Converts from one spatial reference to another using the epsg codes

    KV WRL 2018

    Arguments:
    -----------
    points: np.ndarray or list of np.ndarray
        array with 2 columns (rows first and columns second)
    epsg_in: int
        epsg code of the spatial reference in which the input is
    epsg_out: int
        epsg code of the spatial reference in which the output will be

    Returns:
    -----------
    points_converted: np.ndarray or list of np.ndarray
        converted coordinates
    """

    # define input and output spatial references
    inSpatialRef = osr.SpatialReference()
    inSpatialRef.ImportFromEPSG(epsg_in)
    outSpatialRef = osr.SpatialReference()
    outSpatialRef.ImportFromEPSG(epsg_out)
    # create a coordinates transform
    coordTransform = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
    # transform points
    if type(points) is list:
        points_converted = []
        # iterate over the list
        for i, arr in enumerate(points):
            points_converted.append(np.array(coordTransform.TransformPoints(arr)))

    elif type(points) is np.ndarray:
        points_converted = np.array(coordTransform.TransformPoints(points))

    else:
        raise ValueError('invalid input type')

    return points_converted


def classify_sand_unsupervised(im_ms_ps, im_pan, cloud_mask, wl_pix, buffer_size, min_beach_size, plot_bool):
    """
    Classifies sand pixels using an unsupervised algorithm (Kmeans)
    Set buffer_size to False if you want to classify the entire image,
    otherwise buffer_size defines the buffer around the shoreline in which
    pixels are considered for classification.
    This classification is not robust and is only used to train a supervised algorithm

    KV WRL 2018

    Arguments:
    -----------
    im_ms_ps: np.ndarray
        Pansharpened RGB + downsampled NIR and SWIR
    im_pan: np.ndarray
        Panchromatic band
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    wl_pix: list of np.ndarray
        list of arrays containing the pixel coordinates of the water line
    buffer_size: int or False
        radius of the disk used to create a buffer around the water line;
        when False, the entire image is considered for kmeans
    min_beach_size: int
        minimum number of connected pixels belonging to a single beach
    plot_bool: boolean
        True if plot is wanted

    Returns:
    -----------
    im_sand: np.ndarray
        2D binary image containing True where sand pixels are located
    """
    # reshape the 2D images into vectors
    vec_ms_ps = im_ms_ps.reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1], im_ms_ps.shape[2])
    vec_pan = im_pan.reshape(im_pan.shape[0]*im_pan.shape[1])
    vec_mask = cloud_mask.reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1])
    # add B,G,R,NIR and pan bands to the vector of features
    vec_features = np.zeros((vec_ms_ps.shape[0], 5))
    vec_features[:,[0,1,2,3]] = vec_ms_ps[:,[0,1,2,3]]
    vec_features[:,4] = vec_pan

    if buffer_size:
        # create binary image with ones where the detected water line is
        im_buffer = np.zeros((im_ms_ps.shape[0], im_ms_ps.shape[1]))
        for i, contour in enumerate(wl_pix):
            indices = [(int(_[0]), int(_[1])) for _ in list(np.round(contour))]
            for j, idx in enumerate(indices):
                im_buffer[idx] = 1
        # perform a dilation on the binary image
        se = morphology.disk(buffer_size)
        im_buffer = morphology.binary_dilation(im_buffer, se)
        vec_buffer = (im_buffer == 1).reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1])
    else:
        vec_buffer = np.ones((vec_pan.shape[0]))
    # add cloud mask to buffer
    vec_buffer = np.logical_and(vec_buffer, ~vec_mask)
    # perform kmeans (6 clusters)
    kmeans = KMeans(n_clusters=6, random_state=0).fit(vec_features[vec_buffer,:])
    labels = np.ones((len(vec_mask))) * np.nan
    labels[vec_buffer] = kmeans.labels_
    im_labels = labels.reshape(im_ms_ps.shape[0], im_ms_ps.shape[1])
    # find the class with maximum reflection in the B,G,R,Pan
    im_sand = im_labels == np.argmax(np.mean(kmeans.cluster_centers_[:,[0,1,2,4]], axis=1))
    im_sand = morphology.remove_small_objects(im_sand, min_size=min_beach_size, connectivity=2)
    im_sand = morphology.binary_erosion(im_sand, morphology.disk(1))
    # im_sand = morphology.binary_dilation(im_sand, morphology.disk(1))

    if plot_bool:
        im = np.copy(rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 99.9, False))
        im[im_sand,0] = 0
        im[im_sand,1] = 0
        im[im_sand,2] = 1
        plt.figure()
        plt.imshow(im)
        plt.axis('image')
        plt.title('Sand classification')
        plt.show()

    return im_sand
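
# --- illustrative usage (sketch): K-means labelling around a detected shoreline;
# buffer_size and min_beach_size are in pixels, values taken from the driver scripts below ---
# im_sand = classify_sand_unsupervised(im_ms_ps, im_pan, cloud_mask, wl_pix,
#                                      buffer_size=10, min_beach_size=20, plot_bool=False)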

def classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool):
    """
    Classifies every pixel in the image into one of 4 classes:
    - sand                                    --> label = 1
    - whitewater (breaking waves and swash)   --> label = 2
    - water                                   --> label = 3
    - other (vegetation, buildings, rocks...) --> label = 0

    The classifier is a Neural Network, trained with 7000 pixels for the class SAND and 1500 pixels for
    each of the other classes. This is because the class of interest for my application is SAND and I
    wanted to minimize the classification error for that class

    KV WRL 2018

    Arguments:
    -----------
    im_ms_ps: np.ndarray
        Pansharpened RGB + downsampled NIR and SWIR
    im_pan: np.ndarray
        Panchromatic band
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    min_beach_size: int
        minimum number of connected pixels belonging to a single beach
    plot_bool: boolean
        True if plot is wanted

    Returns:
    -----------
    im_classif: np.ndarray
        2D image containing labels
    im_labels: np.ndarray of booleans
        3D image containing a boolean image for each class (im_classif == label)
    """

    # load classifier
    clf = joblib.load('functions/NeuralNet_classif.pkl')

    # calculate features
    n_features = 10
    im_features = np.zeros((im_ms_ps.shape[0], im_ms_ps.shape[1], n_features))
    im_features[:,:,[0,1,2,3,4]] = im_ms_ps
    im_features[:,:,5] = im_pan
    im_features[:,:,6] = nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, False) # ND(NIR-G)
    im_features[:,:,7] = nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,2], cloud_mask, False) # ND(NIR-R)
    im_features[:,:,8] = nd_index(im_ms_ps[:,:,0], im_ms_ps[:,:,2], cloud_mask, False) # ND(B-R)
    im_features[:,:,9] = nd_index(im_ms_ps[:,:,4], im_ms_ps[:,:,1], cloud_mask, False) # ND(SWIR-G)
    # remove NaNs and clouds
    vec_features = im_features.reshape((im_ms_ps.shape[0] * im_ms_ps.shape[1], n_features))
    vec_cloud = cloud_mask.reshape(cloud_mask.shape[0]*cloud_mask.shape[1])
    vec_nan = np.any(np.isnan(vec_features), axis=1)
    vec_mask = np.logical_or(vec_cloud, vec_nan)
    vec_features = vec_features[~vec_mask, :]
    # predict with NN classifier
    labels = clf.predict(vec_features)
    # recompose image
    vec_classif = np.zeros((cloud_mask.shape[0]*cloud_mask.shape[1]))
    vec_classif[~vec_mask] = labels
    im_classif = vec_classif.reshape((im_ms_ps.shape[0], im_ms_ps.shape[1]))

    # labels
    im_sand = im_classif == 1
    im_sand = morphology.remove_small_objects(im_sand, min_size=min_beach_size, connectivity=2)
    im_swash = im_classif == 2
    im_water = im_classif == 3
    im_labels = np.stack((im_sand,im_swash,im_water), axis=-1)

    if plot_bool:
        # display on top of pansharpened RGB
        im_display = rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 99.9, False)
        im = np.copy(im_display)
        # define colours for plot
        colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
        for k in range(0,im_labels.shape[2]):
            im[im_labels[:,:,k],0] = colours[k,0]
            im[im_labels[:,:,k],1] = colours[k,1]
            im[im_labels[:,:,k],2] = colours[k,2]

        plt.figure()
        ax1 = plt.subplot(121)
        plt.imshow(im_display)
        plt.axis('off')
        plt.title('Image')
        ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
        plt.imshow(im)
        plt.axis('off')
        plt.title('NN classifier')
        mng = plt.get_current_fig_manager()
        mng.window.showMaximized()
        plt.tight_layout()
        plt.draw()

    return im_classif, im_labels


def find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, plot_bool):
    """
    New method for extracting shorelines (more robust)

    KV WRL 2018

    Arguments:
    -----------
    im_ms_ps: np.ndarray
        Pansharpened RGB + downsampled NIR and SWIR
    im_labels: np.ndarray
        3D image containing a boolean image for each class in the order (sand, swash, water)
    cloud_mask: np.ndarray
        2D cloud mask with True where cloud pixels are
    buffer_size: int
        size of the buffer around the sandy beach
    plot_bool: boolean
        True if plot is wanted

    Returns:
    -----------
    contours_wi: list of np.arrays
        contains the (row,column) coordinates of the contour lines extracted with the Water Index
    contours_mwi: list of np.arrays
        contains the (row,column) coordinates of the contour lines extracted with the Modified Water Index
    """

    nrows = cloud_mask.shape[0]
    ncols = cloud_mask.shape[1]
    im_display = rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 99.9, False)

    # calculate Normalized Difference Modified Water Index (SWIR - G)
    im_mwi = nd_index(im_ms_ps[:,:,4], im_ms_ps[:,:,1], cloud_mask, False)
    # calculate Normalized Difference Water Index (NIR - G)
    im_wi = nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, False)
    # stack indices together
    im_ind = np.stack((im_wi, im_mwi), axis=-1)
    vec_ind = im_ind.reshape(nrows*ncols,2)

    # process labels
    vec_sand = im_labels[:,:,0].reshape(ncols*nrows)
    vec_swash = im_labels[:,:,1].reshape(ncols*nrows)
    vec_water = im_labels[:,:,2].reshape(ncols*nrows)

    # create a buffer around the sandy beach
    se = morphology.disk(buffer_size)
    im_buffer = morphology.binary_dilation(im_labels[:,:,0], se)
    vec_buffer = im_buffer.reshape(nrows*ncols)

    # select water/sand/swash pixels that are within the buffer
    int_water = vec_ind[np.logical_and(vec_buffer,vec_water),:]
    int_sand = vec_ind[np.logical_and(vec_buffer,vec_sand),:]
    int_swash = vec_ind[np.logical_and(vec_buffer,vec_swash),:]

    # threshold the sand/water intensities (column 0 holds the WI values, column 1 the MWI values)
    int_all = np.append(int_water,int_sand, axis=0)
    t_wi = filters.threshold_otsu(int_all[:,0])
    t_mwi = filters.threshold_otsu(int_all[:,1])

    # find contour with MS algorithm
    im_wi_buffer = np.copy(im_wi)
    im_wi_buffer[~im_buffer] = np.nan
    im_mwi_buffer = np.copy(im_mwi)
    im_mwi_buffer[~im_buffer] = np.nan
    contours_wi = measure.find_contours(im_wi_buffer, t_wi)
    contours_mwi = measure.find_contours(im_mwi_buffer, t_mwi)

    if plot_bool:

        im = np.copy(im_display)
        # define colours for plot
        colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
        for k in range(0,im_labels.shape[2]):
            im[im_labels[:,:,k],0] = colours[k,0]
            im[im_labels[:,:,k],1] = colours[k,1]
            im[im_labels[:,:,k],2] = colours[k,2]
        plt.figure()
        plt.imshow(im)
        for i,contour in enumerate(contours_wi): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
        for i,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='g')
        plt.draw()

        fig, ax = plt.subplots(2,1, sharex=True)
        vals = ax[0].hist(int_water[:,0], bins=100, label='water')
        ax[0].hist(int_sand[:,0], bins=100, alpha=0.5, label='sand')
        ax[0].hist(int_swash[:,0], bins=100, alpha=0.5, label='swash')
        ax[0].plot([t_wi, t_wi], [0, np.max(vals[0])], 'r-')
        ax[0].legend()
        ax[0].set_title('Water Index NIR-G')
        vals = ax[1].hist(int_water[:,1], bins=100, label='water')
        ax[1].hist(int_sand[:,1], bins=100, alpha=0.5, label='sand')
        ax[1].hist(int_swash[:,1], bins=100, alpha=0.5, label='swash')
        ax[1].plot([t_mwi, t_mwi], [0, np.max(vals[0])], 'r-')
        ax[1].legend()
        ax[1].set_title('Modified Water Index SWIR-G')
        plt.draw()

    return contours_wi, contours_mwi
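
# --- illustrative usage (sketch): the robust shoreline extraction chains the NN
# classifier with the buffered, class-constrained Otsu thresholding above ---
# im_classif, im_labels = classify_image_NN(im_ms_ps, im_pan, cloud_mask, 20, False)
# contours_wi, contours_mwi = find_wl_contours2(im_ms_ps, im_labels, cloud_mask, 10, False)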
@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 5 16:19:31 2018

@author: z5030440
"""

# imports needed by the calls below (the original snippet assumes an interactive session)
import numpy as np
import matplotlib.pyplot as plt
import scipy.io as sio

# sl_gt, sl_sds and zav are produced earlier in the analysis session
d_gt = {'arr':sl_gt}
d_sds = {'arr':sl_sds}

sio.savemat('sl_gt.mat', mdict=d_gt)
sio.savemat('sl_sds.mat', mdict=d_sds)

#%%

herror = sio.loadmat('hor_error.mat')

diff_p = (herror['gt_av'] - herror['sds_av'])[0,:]

f = plt.figure()
plt.subplot(3,1,1)
plt.bar(np.linspace(1,len(zav),len(zav)), herror['p_rmse'][0])
plt.ylabel('rmse [m]')
plt.xticks([])
plt.title('Horizontal cross-shore error')

plt.subplot(3,1,2)
plt.bar(np.linspace(1,len(zav),len(zav)), herror['p_mean'][0], color='orange')
plt.ylabel('mean [m]')
plt.xticks([])

plt.subplot(3,1,3)
plt.bar(np.linspace(1,len(zav),len(zav)), herror['p_std'][0], color='g')
plt.ylabel('std [m]')
plt.xlabel('comparison #')
plt.grid(False)
plt.grid(axis='y')
f.subplots_adjust(hspace=0.2)
plt.draw()
@ -0,0 +1,65 @@
close all
clear
clc

addpath(genpath('C:\Users\z5030440\Documents\GitHub\geetools\functions\xyz2spz'))

sl_gt = load('sl_gt.mat')
sl_sds = load('sl_sds.mat')

sl_sds = sl_sds.arr
sl_gt = sl_gt.arr
for i = 1:length(sl_sds)
    sds.x = sl_sds{i}(:,1)
    sds.y = sl_sds{i}(:,2)
    sds.z = zeros(length(sl_sds{i}(:,1)),1)

    gt.x = sl_gt{i}(:,1)
    gt.y = sl_gt{i}(:,2)
    gt.z = zeros(length(sl_gt{i}(:,1)),1)

    outsds = xyz2spz(sds,'NARRA')
    outgt = xyz2spz(gt,'NARRA')

    figure
    hold on
    grid on
    box on
    plot(outsds.s, outsds.p, 'b-', 'linewidth',2)
    plot(outgt.s, outgt.p, 'r-', 'linewidth',2)
    xlabel('s [m]')
    ylabel('p [m]')
    title('Horizontal comparison in spz coordinates')
    legend({'SDS', 'contour at tide level'})

    xq = nanmin(outsds.s):10:nanmax(outsds.s)
    [gt_s idx_gt] = unique(outgt.s)
    gt_p = outgt.p(idx_gt)
    gt_p_int = interp1(gt_s, gt_p, xq)
    [sds_s idx_sds] = unique(outsds.s)
    sds_p = outsds.p(idx_sds)
    sds_p_int = interp1(sds_s, sds_p, xq)
    diff_p = sds_p_int - gt_p_int;

    sds_av(i) = median(sds_p_int(~(sds_p_int > median(sds_p_int) + 2*std(sds_p_int) | sds_p_int < median(sds_p_int) - 2*std(sds_p_int))))
    gt_p_int(isnan(gt_p_int)) = []
    gt_av(i) = median(gt_p_int(~(gt_p_int > median(gt_p_int) + 2*std(gt_p_int) | gt_p_int < median(gt_p_int) - 2*std(gt_p_int))))

    idx_nan = isnan(diff_p)
    diff_p(idx_nan) = []
    xq(idx_nan) = []
    idx_outlier = diff_p > median(diff_p) + 2*std(diff_p) | diff_p < median(diff_p) - 2*std(diff_p)
    diff_p(idx_outlier) = []
    xq(idx_outlier) = []

    p_rmse(i) = sqrt(mean(diff_p.^2))
    p_mean(i) = mean(diff_p)
    p_std(i) = std(diff_p)
    p_q90(i) = quantile(abs(diff_p), 0.9)

end

clearvars -except sds_av gt_av p_rmse p_mean p_std p_q90

save 'hor_error.mat'
@ -0,0 +1,213 @@
function [res,fval,it] = muller (f,Z0,itmax,ztol,ftol,option)
% MULLER find a zero of a real or complex function Y = F(Z)
%
% Syntax:
%
% RES = MULLER (F,Z0) find the zero of a complex or real function
% 'F' (either an anonymous function or .m function) using three initial
% guesses contained in the vector Z0. Muller takes the function F and
% evaluates it at each initial point using feval. F doesn't need to be
% vectorized.
% The initial guesses can be real or complex numbers close to the zero,
% bracketing the zero is not necessary. Parameters ITMAX, ZTOL and
% FTOL are set by default to 1000, 1e-5 and 1e-5, respectively.
%
% RES = MULLER (F,Z0,ITMAX) the maximum number of iterations is set
% equal to ITMAX. ZTOL and FTOL are set by default with the values mentioned
% above.
%
% RES = MULLER (F,Z0,ITMAX,ZTOL) ZTOL is used as a stopping
% criterion. If the absolute difference between the values of Z found in
% the two latest iterations is less than ZTOL, the program is stopped. FTOL
% is set by default with the value mentioned above.
%
% RES = MULLER (F,Z0,ITMAX,ZTOL,FTOL) FTOL is used as a stopping
% criterion. If the value of the function F at the Z found in the last
% iteration is less than FTOL, the program is stopped.
%
% RES = MULLER (F,Z0,ITMAX,ZTOL,FTOL,'both') indicate that both
% criteria ZTOL and FTOL must be satisfied simultaneously. By default,
% MULLER stops if one of the two criteria is fulfilled.
%
% [RES,FVAL] = MULLER (F,Z0,...) return the value of the function
% F at the Z found in the last iteration.
%
% [RES,FVAL,IT] = MULLER (F,Z0,...) return the number of iterations
% used to find the zero.
%
% Example 1:
% myf = @(x) (x-1)^3;
%
% muller(myf,[0 0.1 0.2],[],[],[],'both')
% ans =
%    1.0000 + 0.0000i
%
% Example 2:
%
% [res,fval,it] = muller2('cosh',[0 0.1 0.2],[],[],[],'both')
%
% res =
%    0.0000 + 1.5708i
%
% fval =
%    5.5845e-012 + 3.0132e-012i
%
% it =
%    5
%
% Method taken from:
% Numerical Recipes: The art of scientific computing
% W.H. Press; B.P. Flannery; S.A. Teukolsky; W.T. Vetterling
% 1986
%
% Thanks to John D'Errico for his helpful review.
%
% Written by Daniel H. Cortes
% MAE Department, West Virginia University
% March, 2008.
%


%=================================================
% Checking proper values of the input parameters
%=================================================

if nargin > 6
    error ('Too many arguments.')
elseif nargin < 2
    error('Too few arguments.')
end

if nargin < 6
    opt = 1;
elseif ischar(option) == 1
    if size(option,2) == 4
        if sum(option == 'both') == 4
            opt = 2;
        else
            error ('Option parameter must be *both*.')
        end
    else
        error ('Option parameter must be *both*.')
    end
else
    error ('Option parameter must be a character array (string).')
end

if nargin < 5
    ftol = 1e-5;
elseif isnumeric(ftol) ~= 1
    error ('FTOL must be a numeric argument.')
elseif isempty(ftol) == 1
    ftol = 1e-5;
elseif size(ftol,1) ~= 1 || size(ftol,2) ~= 1
    error ('FTOL cannot be an array')
end

if nargin < 4
    ztol = 1e-5;
elseif isnumeric(ztol) ~= 1
    error ('ZTOL must be a numeric argument.')
elseif isempty(ztol) == 1
    ztol = 1e-5;
elseif size(ztol,1) ~= 1 || size(ztol,2) ~= 1
    error ('ZTOL cannot be an array.')
end

if nargin < 3
    itmax = 1000;
elseif isnumeric(itmax) ~= 1
    error ('ITMAX must be a numeric argument.')
elseif isempty(itmax) == 1
    itmax = 1000;
elseif size(itmax,1) ~= 1 || size(itmax,2) ~= 1
    error ('ITMAX cannot be an array.')
end

if isnumeric(Z0) ~= 1
    error ('Z0 must be a vector of three numeric arguments.')
elseif isempty(Z0) == 1 || length(Z0) ~= 3 || min(size(Z0)) ~= 1
    error ('Z0 must be a vector of length 3 of either complex or real arguments.')
end

if Z0(1)==Z0(2) || Z0(1)==Z0(3) || Z0(2)==Z0(3)
    error('The initial guesses must be different')
end

%=============================
% Beginning of Muller's method
%=============================

z0 = Z0(1);
z1 = Z0(2);
z2 = Z0(3);

y0 = feval ( f, z0);
y1 = feval ( f, z1);
y2 = feval ( f, z2);

for it = 1:itmax
    q = (z2 - z1)/(z1 - z0);
    A = q*y2 - q*(1+q)*y1 + q^2*y0;
    B = (2*q + 1)*y2 - (1 + q)^2*y1 + q^2*y0;
    C = (1 + q)*y2;

    if ( A ~= 0 )

        disc = B^2 - 4*A*C;

        den1 = ( B + sqrt ( disc ) );
        den2 = ( B - sqrt ( disc ) );

        if ( abs ( den1 ) < abs ( den2 ) )
            z3 = z2 - (z2 - z1)*(2*C/den2);
        else
            z3 = z2 - (z2 - z1)*(2*C/den1);
        end

    elseif ( B ~= 0 )
        z3 = z2 - (z2 - z1)*(2*C/B);
    else
        warning('Muller Method failed to find a root. Last iteration result used as an output. Result may not be accurate')
        res = z2;
        fval = y2;
        return
    end

    y3 = feval ( f, z3);

    if opt == 1
        if ( abs (z3 - z2) < ztol || abs ( y3 ) < ftol )
            res = z3;
            fval = y3;
            return
        end
    else
        if ( abs (z3 - z2) < ztol && abs ( y3 ) < ftol )
            res = z3;
            fval = y3;
            return
        end
    end

    z0 = z1;
    z1 = z2;
    z2 = z3;

    y0 = y1;
    y1 = y2;
    y2 = y3;

end

res = z2;
fval = y2;
%warning('Maximum number of iterations reached. Result may not be accurate')
@ -0,0 +1,87 @@
function out = sort_back( data, ind, dim )
% SORT_BACK sort back data to original order
% ind is the indexes obtained from sorting
% dim is the sorted dimension of the data (assumed to be 1 if not specified)
% Ex:
%   y = randn(3,4,2);
%   [y,ind] = sort(y,2);
%   do stuff with sorted y...
%   y2 = sort_back( y, ind, 2 );
%
% Works on arrays of any dimension
% Also works on cellstrings (vectors)
%
% C = {'hello' 'yes' 'no' 'goodbye'};
% [C,ind] = sort(C);
% C2 = sort_back(C,ind);
%
% See also SORT

%Author Ivar Eskerud Smith

if size(ind)~=size(data)
    error('Different size of indexes and input data');
end

if iscell(data)
    if ~any(size(data)==1)
        error('Only vectors are supported in cell sorting/back-sorting');
    end
    out=cell(size(data));
    out(ind) = data;
    return;
end

if ~isnumeric(data) || ~isnumeric(ind)
    error('Inputs have to be numeric or cell');
end

n=ndims(ind);
if ~exist('dim','var')
    dim=1;
end
if dim>n
    error('Specified sorted dimension must be within array bounds');
end

%shift array so that the sorted dim is the first dimension
if dim~=1
    sortInd=1:1:n;sortInd(1)=dim;sortInd(dim)=1;
    data = permute(data,sortInd);
    ind = permute(ind,sortInd);
end
inds = repmat({1},1,n);inds{1}=':';
if ~issorted( data(inds{:}) )
    warning('The input data is not sorted along the specified dimension');
end

s = size(ind);
nData = numel(data);
inds = repmat({1},1,n);
inds(1:2)={':',':'};
shiftSize = s(1)*s(2);
out=nan(size(data));

%loop all 2d arrays within nd-array
for k=1:prod(s(3:end))
    tmpdata = data(inds{:});
    tmpind = ind(inds{:});

    %data is shifted so that the sorted dim = 1
    for i=1:numel(tmpdata(1,:))
        out(tmpind(:,i),i) = tmpdata(:,i);
    end

    if n>2
        %shift to next 2d array within nd-array
        shiftInds = mod((1:nData)-shiftSize-1,nData)+1;
        out=reshape(out(shiftInds),s);
        data=reshape(data(shiftInds),s);
        ind=reshape(ind(shiftInds),s);
    end
end

%permute back to original order
sortInd=1:1:ndims(out);sortInd(1)=dim;sortInd(dim)=1;
out = permute(out,sortInd);
@ -0,0 +1,117 @@
function out = xyz2spz(xyz_data,site)

%function out = xyz2spz(xyz_data,site)
%
%Function to transform (x,y,z) coordinates on an embayed beach to alongshore - cross-shore
%coordinates (s,p,z) using the log spiral, given by the equation
%r = r0*exp(A*theta). A = cot(alpha).
%
%xyz_data is a structure containing:
%
%xyz_data.x
%xyz_data.y
%xyz_data.z
%
%site is the name of the structure generated from the MALT graphical user interface
%
%Refer to paper
%
%Harley, M.D. and Turner, I.L. (2007) A simple data transformation technique
%for pre-processing survey data at embayed beaches, Coast. Eng.,
%doi:10.1016/j.coastaleng.2007.07.001, in press.
%
%Created by Mitch Harley
%8th August, 2005
%Last Modified 4th April, 2012

%----------------------------------------------------------------
%LOAD LOGSPIRAL-FIT PARAMETERS

eval(['load ' site ';'])
eval(['site = ' site ';'])

%Define origin and A of log spiral
origin = site.origin;
alph = site.alpha;
A = cot(alph*pi/180);
r0_origin = site.r0_origin;

%-----------------------------------------------------------------

%DO TRANSFORMATION

%Points need to be sorted prior to analysis %MDH 4/4/2012
aa = [xyz_data.x xyz_data.y xyz_data.z];
[sorted_points,Isort] = sortrows(aa);

%Convert xyz coordinates to polar coordinates
r = sqrt((sorted_points(:,1) - origin(1)).^2+(sorted_points(:,2) - origin(2)).^2);
theta = unwrap(atan2((sorted_points(:,2)-origin(2)),(sorted_points(:,1)-origin(1))) );

%Find constants delta and kappa
delta = pi/2+acot(A)-theta; %From Equation 5
kappa = r./(r0_origin*sin(pi/2-acot(A))); %From Equation 6

%Find theta_s by solving Equation 6 implicitly
for i = 1:length(theta);
    %Use muller function in case any complex solutions
    theta_s(i,1) = muller(@(x) (x-(1/A)*log(kappa(i)*sin(delta(i)+x))),[theta(i)-pi/8 theta(i) theta(i)+pi/8]);%From Equation 6
end

%plot(theta_s*180/pi)

%Find r_s
r_s = r0_origin*exp(A*theta_s);%From Equation 1

%Find s
lamda = r0_origin*sec(acot(A));%From Equation 8
start_point = 0; %Can be changed to make a more suitable start point
s = lamda*(exp(A*theta_s)-exp(A*start_point));%From Equation 8

%Find p
p = r.*sin(theta-theta_s)./sin(pi/2-acot(A)); %From Equation 9

%Convert any complex numbers to real numbers
p = real(p);
s = real(s);

%Sort back points to get the right indices %MDH 4/4/2012
p = sort_back(p,Isort);
s = sort_back(s,Isort);

%-----------------------------------------------------------------
%POST-PROCESS DATA
%s data

if site.reverse_s == 0
    s = s - site.startpoint; %Make minimum s == 0
elseif site.reverse_s == 1
    s = -(s - site.startpoint);
end

%p data
if site.subtract_res == 1 %Add switch for user to subtract residuals or not - MDH 19/5/2010
    [MIN,L] = min(site.boundary.s);
    I = find(s<=MIN);
    p(I) = p(I) - site.boundary.p(L);
    [MAX,L] = max(site.boundary.s);
    I = find(s>=MAX);
    p(I) = p(I) - site.boundary.p(L);
    I = find(s>MIN&s<MAX);
    p(I) = p(I) - interp1(site.boundary.s,site.boundary.p,s(I)); %Subtract logspiral errors from p data
end

if site.alpha<0
    p = -p;
end

%-----------------------------------------------------------------

out.s = s;
out.p = p;
out.z = xyz_data.z;
@ -0,0 +1,166 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Make a gif of the satellite images
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import matplotlib.animation as manimation
import ee
import pdb

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
import imageio

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.5        # threshold for cloud cover
plot_bool = False         # if you want the plots
prob_high = 99.9          # upper probability to clip and rescale pixel intensity
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356       # GDA94 / MGA Zone 56

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA'
sitename = 'OLDBAR_inlet'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps) # sort timestamps since images are sorted in directory
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
    input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints2' + '.pkl'), 'rb') as f:
    refpoints = pickle.load(f)

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
cloud_cover_ts = []
date_acquired_ts = []
idx_skipped = []
idx_nocloud = []

t = []
shorelines = []

with open(os.path.join(filepath, sitename + '_idxnocloud' + '.pkl'), 'rb') as f:
    idx_nocloud = pickle.load(f)

for i in idx_nocloud:
    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)
    # cloud mask
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms,(im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')
    # check if -inf or nan values and add to cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skipped cloud ' + str(i))
        idx_skipped.append(i)
        continue
    # idx_nocloud.append(i)
    # check if image for that date is already present and keep the one with less clouds
    if file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] in date_acquired_ts:
        idx_samedate = utils.find_indices(date_acquired_ts, lambda e : e == file_names_pan[i][9:19])
        idx_samedate = idx_samedate[0]
        print(str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
        if cloud_cover >= cloud_cover_ts[idx_samedate]:
            print('skipped double ' + str(i))
            idx_skipped.append(i)
            continue
        else:
            del shorelines[idx_samedate]
            del t[idx_samedate]
            del cloud_cover_ts[idx_samedate]
            del date_acquired_ts[idx_samedate]
            print('deleted ' + str(idx_samedate))

    # rescale intensities
    im_ms = sds.rescale_image_intensity(im_ms, cloud_mask, prob_high, plot_bool)
    im_pan = sds.rescale_image_intensity(im_pan, cloud_mask, prob_high, plot_bool)
    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
    # calculate NDWI
    im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
    # detect edges
    wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)
    # convert from pixels to world coordinates
    wl_coords = sds.convert_pix2world(wl_pix, georef)
    # convert to output epsg spatial reference
    wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)

    # save images as png for video
    fig = plt.figure()
    plt.grid(False)
    plt.imshow(im_ms_ps[:,:,[2,1,0]], animated=True)
    plt.title(file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10])
    plt.xticks([])
    plt.yticks([])
    plt.axis('equal')
    plt.tight_layout()
    plt.draw()
    plt.savefig(os.path.join(filepath, 'plots',
                             file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] + '.png'),
                dpi=300)
    plt.close()

# create gif
images = []
filenames = os.listdir(os.path.join(filepath, 'plots'))
with imageio.get_writer('movie.gif', mode='I', duration=0.2) as writer:
    for filename in filenames:
        image = imageio.imread(os.path.join(filepath,'plots',filename))
        writer.append_data(image)
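
# note (a suggested tweak, not in the original script): os.listdir() order is
# platform-dependent; sorting keeps the gif frames chronological since the png
# file names start with the acquisition date
# filenames = sorted(os.listdir(os.path.join(filepath, 'plots')))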
@ -0,0 +1,213 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Run Neural Network on image to extract sandy pixels
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from matplotlib import gridspec
from datetime import datetime, timedelta
import pytz
import ee
import pdb
import time
import pandas as pd
# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from scipy import ndimage
import imageio

# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.3        # threshold for cloud cover
plot_bool = False         # if you want the plots
prob_high = 100           # upper probability to clip and rescale pixel intensity
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356       # GDA94 / MGA Zone 56
buffer_size = 10          # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 20       # number of pixels in a beach (pixel classification)

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA_all'
#sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'OLDBAR_inlet'
#sitename = 'SANDMOTOR'
sitename = 'TAIRUA'
#sitename = 'DUCK'

# Load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
daysall = (datetime(2019,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
idx_skipped = []
idx_nocloud = []
n_features = 10
train_pos = np.nan*np.ones((1,n_features))
train_neg = np.nan*np.ones((1,n_features))
columns = ('B','G','R','NIR','SWIR','Pan','WI','VI','BR', 'mWI', 'class')

#%%
for i in range(N):
    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    nrow = im_pan.shape[0]
    ncol = im_pan.shape[1]
    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)
    # cloud mask
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms,(im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')
    # check if -inf or nan values and add to cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
    # skip if cloud cover is more than the threshold
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
        idx_skipped.append(i)
        continue
    idx_nocloud.append(i)

    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)

    im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)

    im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
    im = np.copy(im_display)
    # define colours for plot
    colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
    for k in range(0,im_labels.shape[2]):
        im[im_labels[:,:,k],0] = colours[k,0]
        im[im_labels[:,:,k],1] = colours[k,1]
        im[im_labels[:,:,k],2] = colours[k,2]

#    fig = plt.figure()
#    plt.suptitle(date_im, fontsize=17, fontweight='bold')
#    ax1 = plt.subplot(121)
#    plt.imshow(im_display)
#    plt.axis('off')
#    ax2 = plt.subplot(122, sharex=ax1, sharey=ax1)
#    plt.imshow(im)
#    plt.axis('off')
#    plt.gcf().set_size_inches(17.99,7.55)
#    plt.tight_layout()
#    orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
#    white_patch = mpatches.Patch(color=[204/255,1,1], label='swash/whitewater')
#    blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
#    plt.legend(handles=[orange_patch,white_patch,blue_patch], bbox_to_anchor=(0.95, 0.2))
#    plt.draw()

    date_im = timestamps_sorted[i].strftime('%d %b %Y')
    daysnow = (timestamps_sorted[i] - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()

    fig = plt.figure()
    gs = gridspec.GridSpec(2, 2, height_ratios=[1, 20])

    ax1 = fig.add_subplot(gs[0,:])
    plt.plot(0,0,'ko',daysall,0,'ko')
    plt.plot([0,daysall],[0,0],'k-')
    plt.plot(daysnow,0,'ro')
    plt.text(0,0.05,'2013')
    plt.text(daysall,0.05,'2019')
    plt.plot((datetime(2014,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
    plt.plot((datetime(2015,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
    plt.plot((datetime(2016,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
    plt.plot((datetime(2017,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
|
||||
plt.plot((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0,'ko',markersize=3)
|
||||
plt.text((datetime(2014,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2014')
|
||||
plt.text((datetime(2015,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2015')
|
||||
plt.text((datetime(2016,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2016')
|
||||
plt.text((datetime(2017,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2017')
|
||||
plt.text((datetime(2018,1,1,tzinfo=pytz.utc)- datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds(),0.05,'2018')
|
||||
|
||||
plt.axis('off')
|
||||
|
||||
ax2 = fig.add_subplot(gs[1,0])
|
||||
plt.imshow(im_display)
|
||||
plt.axis('off')
|
||||
plt.title(date_im, fontsize=17, fontweight='bold')
|
||||
|
||||
ax3 = fig.add_subplot(gs[1,1])
|
||||
plt.imshow(im)
|
||||
plt.axis('off')
|
||||
orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
|
||||
white_patch = mpatches.Patch(color=[204/255,1,1], label='swash/whitewater')
|
||||
blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
|
||||
plt.legend(handles=[orange_patch,white_patch,blue_patch], bbox_to_anchor=(0.95, 0.2))
|
||||
|
||||
plt.gcf().set_size_inches(17.99,7.55)
|
||||
plt.gcf().set_tight_layout(True)
|
||||
|
||||
plt.draw()
|
||||
plt.savefig(os.path.join(filepath,'plots_classif', file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] + '.jpg'), dpi = 300)
|
||||
plt.close()
|
||||
|
||||
# create gif
|
||||
images = []
|
||||
filenames = os.listdir(os.path.join(filepath, 'plots_classif'))
|
||||
with imageio.get_writer(sitename + '.gif', mode='I', duration=0.4) as writer:
|
||||
for filename in filenames:
|
||||
image = imageio.imread(os.path.join(filepath,'plots_classif',filename))
|
||||
writer.append_data(image)
|
||||
|
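A note on the gif step above: os.listdir makes no ordering guarantee, so the frames may come out in an arbitrary order. A minimal sketch (assuming the 'YYYY-MM-DD' frame naming produced by the savefig call above) that sorts the frames chronologically before writing:

import os
import imageio

frames_path = os.path.join(filepath, 'plots_classif')
# filenames start with 'YYYY-MM-DD', so a lexicographic sort is chronological
filenames = sorted(os.listdir(frames_path))
with imageio.get_writer(sitename + '.gif', mode='I', duration=0.4) as writer:
    for filename in filenames:
        writer.append_data(imageio.imread(os.path.join(frames_path, filename)))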
@ -0,0 +1,228 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Run Neural Network on image to extract sandy pixels
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
from matplotlib import gridspec
from datetime import datetime, timedelta
import pytz
import ee
import pdb
import time
import pandas as pd

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from scipy import ndimage
import imageio

# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.2        # threshold for cloud cover
plot_bool = False         # if you want the plots
prob_high = 100           # upper probability to clip and rescale pixel intensity
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356       # GDA94 / MGA Zone 56
buffer_size = 10          # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 10       # number of pixels in a beach (pixel classification)

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA_all'
#sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'OLDBAR_inlet'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
#sitename = 'DUCK'
#sitename = 'BROULEE'
sitename = 'MURI'

# load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
daysall = (datetime(2019,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
idx_skipped = []
idx_nocloud = []
n_features = 10
train_pos = np.nan*np.ones((1,n_features))
train_neg = np.nan*np.ones((1,n_features))
columns = ('B','G','R','NIR','SWIR','Pan','WI','VI','BR','mWI','class')

#%%
for i in range(N):

    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    nrow = im_pan.shape[0]
    ncol = im_pan.shape[1]

    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)

    # cloud mask from the QA band, resampled to the pan grid (nearest neighbour)
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check for -inf or nan values and add them to the cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)

    # skip the image if cloud cover is above the threshold
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
        idx_skipped.append(i)
        continue
    idx_nocloud.append(i)

    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)

    # classify the image with the neural network (sand/whitewater/water)
    im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)

    # if there are no sand pixels, skip the image (maybe later fall back to the old detection method)
    if sum(sum(im_labels[:,:,0])) == 0:
        print('skip ' + str(i) + ' - no sand')
        idx_skipped.append(i)
        continue

    # map the water line on the classified image
    contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, False)

    im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
    im = np.copy(im_display)
    # define colours for plot and overlay each class on the rgb image
    colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
    for k in range(0,im_labels.shape[2]):
        im[im_labels[:,:,k],0] = colours[k,0]
        im[im_labels[:,:,k],1] = colours[k,1]
        im[im_labels[:,:,k],2] = colours[k,2]

    date_im = timestamps_sorted[i].strftime('%d %b %Y')
    daysnow = (timestamps_sorted[i] - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()

    fig = plt.figure()
    gs = gridspec.GridSpec(2, 2, height_ratios=[1, 20])

    # timeline (2013-2019) with a red marker at the image date
    ax1 = fig.add_subplot(gs[0,:])
    plt.plot(0, 0, 'ko', daysall, 0, 'ko')
    plt.plot([0, daysall], [0, 0], 'k-')
    plt.plot(daysnow, 0, 'ro')
    plt.text(0, 0.05, '2013')
    plt.text(daysall, 0.05, '2019')
    for year in range(2014, 2019):
        tick = (datetime(year,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
        plt.plot(tick, 0, 'ko', markersize=3)
        plt.text(tick, 0.05, str(year))
    plt.axis('off')

    ax2 = fig.add_subplot(gs[1,0])
    plt.imshow(im_display)
    plt.axis('off')
    plt.title(date_im, fontsize=17, fontweight='bold')

    ax3 = fig.add_subplot(gs[1,1])
    plt.imshow(im)
    for l, contour in enumerate(contours_mwi):
        plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
    plt.axis('off')
    orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
    white_patch = mpatches.Patch(color=[204/255,1,1], label='swash/whitewater')
    blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
    black_line = mlines.Line2D([], [], color='k', linestyle='--', label='shoreline')
    plt.legend(handles=[orange_patch,white_patch,blue_patch,black_line], bbox_to_anchor=(0.95, 0.2))

    plt.gcf().set_size_inches(17.99, 7.55)
    plt.gcf().set_tight_layout(True)

    plt.draw()
    # the filename is 'satname_sitename_YYYY-MM-DD...': extract the 10-character date
    date_str = file_names_pan[i][len(satname)+1+len(sitename)+1 : len(satname)+1+len(sitename)+1+10]
    plt.savefig(os.path.join(filepath, 'plots_classif', date_str + '.jpg'), dpi=300)
    plt.close()

# create gif
images = []
filenames = os.listdir(os.path.join(filepath, 'plots_classif'))
with imageio.get_writer(sitename + '.gif', mode='I', duration=0.4) as writer:
    for filename in filenames:
        image = imageio.imread(os.path.join(filepath, 'plots_classif', filename))
        writer.append_data(image)
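For reference, the core idea behind sds.find_wl_contours2 (the exact implementation lives in functions/sds.py and may differ) is sub-pixel contour extraction on a water index image. A purely illustrative sketch, assuming a SWIR and a green band and a modified NDWI; all names below are hypothetical:

import skimage.measure as measure

def sketch_wl_contours(im_swir, im_green, level=0.0):
    """Illustrative only: extract the water line as the zero level set of mNDWI."""
    im_mndwi = (im_green - im_swir) / (im_green + im_swir)  # modified NDWI
    # marching squares returns sub-pixel (row, col) coordinates along the level set
    contours = measure.find_contours(im_mndwi, level)
    # discard very short contours, mirroring the min_contour_points parameter
    return [c for c in contours if c.shape[0] >= 100]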
@ -0,0 +1,193 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Extract shorelines with the old method (NDWI contouring)
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
from matplotlib import gridspec
from datetime import datetime, timedelta
import pytz
import ee
import pdb
import time
import pandas as pd

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from scipy import ndimage
import imageio

# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.5       # threshold for cloud cover
plot_bool = False        # if you want the plots
prob_high = 100          # upper probability to clip and rescale pixel intensity
min_contour_points = 30  # minimum number of points contained in each water line
output_epsg = 28356      # GDA94 / MGA Zone 56
buffer_size = 10         # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 10      # number of pixels in a beach (pixel classification)

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA_all'
#sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'OLDBAR_inlet'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
#sitename = 'DUCK'
#sitename = 'BROULEE'
sitename = 'MURI2'

# load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
daysall = (datetime(2019,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
idx_skipped = []
idx_nocloud = []
n_features = 10
train_pos = np.nan*np.ones((1,n_features))
train_neg = np.nan*np.ones((1,n_features))
columns = ('B','G','R','NIR','SWIR','Pan','WI','VI','BR','mWI','class')

#%%
for i in range(N):

    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    nrow = im_pan.shape[0]
    ncol = im_pan.shape[1]

    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)

    # cloud mask from the QA band, resampled to the pan grid (nearest neighbour)
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check for -inf or nan values and add them to the cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)

    # skip the image if cloud cover is above the threshold
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
        idx_skipped.append(i)
        continue
    idx_nocloud.append(i)

    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)

    # extract shorelines (old method: contour the NDWI image)
    im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
    wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)

    im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)

    date_im = timestamps_sorted[i].strftime('%d %b %Y')
    daysnow = (timestamps_sorted[i] - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()

    fig = plt.figure()
    gs = gridspec.GridSpec(2, 2, height_ratios=[1, 20])

    # timeline (2013-2019) with a red marker at the image date
    ax1 = fig.add_subplot(gs[0,:])
    plt.plot(0, 0, 'ko', daysall, 0, 'ko')
    plt.plot([0, daysall], [0, 0], 'k-')
    plt.plot(daysnow, 0, 'ro')
    plt.text(0, 0.05, '2013')
    plt.text(daysall, 0.05, '2019')
    for year in range(2014, 2019):
        tick = (datetime(year,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
        plt.plot(tick, 0, 'ko', markersize=3)
        plt.text(tick, 0.05, str(year))
    plt.axis('off')

    # ax2 = fig.add_subplot(gs[1,0])
    # plt.imshow(im_display)
    # plt.axis('off')
    # plt.title(date_im, fontsize=17, fontweight='bold')

    ax3 = fig.add_subplot(gs[1,:])
    plt.imshow(im_display)
    for l, contour in enumerate(wl_pix):
        plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
    plt.title(date_im, fontsize=17, fontweight='bold')
    plt.axis('off')

    plt.gcf().set_size_inches(5.34, 9.18)
    plt.gcf().set_tight_layout(True)

    plt.draw()

    # the filename is 'satname_sitename_YYYY-MM-DD...': extract the 10-character date
    date_str = file_names_pan[i][len(satname)+1+len(sitename)+1 : len(satname)+1+len(sitename)+1+10]
    plt.savefig(os.path.join(filepath, 'plots_classif', date_str + '.jpg'), dpi=300)
    plt.close()

# create gif
images = []
filenames = os.listdir(os.path.join(filepath, 'plots_classif'))
with imageio.get_writer(sitename + '_final.gif', mode='I', duration=0.6) as writer:
    for filename in filenames:
        image = imageio.imread(os.path.join(filepath, 'plots_classif', filename))
        writer.append_data(image)
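The "old method" above contours the NDWI image at a level chosen inside sds.find_wl_contours. A common way to pick that level automatically is Otsu thresholding on the index histogram; a minimal hedged sketch, assuming a cloud-masked NDWI array like im_ndwi above (not the repository's actual implementation):

import numpy as np
import skimage.filters as filters
import skimage.measure as measure

vec = im_ndwi.reshape(-1)
vec = vec[~np.isnan(vec)]             # drop masked/NaN pixels
t_otsu = filters.threshold_otsu(vec)  # separates the water and land modes
wl_contours = measure.find_contours(im_ndwi, t_otsu)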
@ -0,0 +1,227 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Run Neural Network on image to extract sandy pixels
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
from matplotlib import gridspec
from datetime import datetime, timedelta
import pytz
import ee
import pdb
import time
import pandas as pd

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from scipy import ndimage
import imageio

# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.2        # threshold for cloud cover
plot_bool = False         # if you want the plots
prob_high = 100           # upper probability to clip and rescale pixel intensity
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356       # GDA94 / MGA Zone 56
buffer_size = 10          # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 20       # number of pixels in a beach (pixel classification)

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA_all'
sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'OLDBAR_inlet'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
#sitename = 'DUCK'
#sitename = 'BROULEE'

# load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
daysall = (datetime(2019,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
idx_skipped = []
idx_nocloud = []
n_features = 10
train_pos = np.nan*np.ones((1,n_features))
train_neg = np.nan*np.ones((1,n_features))
columns = ('B','G','R','NIR','SWIR','Pan','WI','VI','BR','mWI','class')

#%%
for i in range(N):

    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    nrow = im_pan.shape[0]
    ncol = im_pan.shape[1]

    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)

    # cloud mask from the QA band, resampled to the pan grid (nearest neighbour)
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check for -inf or nan values and add them to the cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)

    # skip the image if cloud cover is above the threshold
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
        idx_skipped.append(i)
        continue
    idx_nocloud.append(i)

    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)

    # classify the image with the neural network (sand/whitewater/water)
    im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)

    # if there are no sand pixels, skip the image (maybe later fall back to the old detection method)
    if sum(sum(im_labels[:,:,0])) == 0:
        print('skip ' + str(i) + ' - no sand')
        idx_skipped.append(i)
        continue

    # map the water line on the classified image
    contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, False)

    im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
    im = np.copy(im_display)
    # define colours for plot and overlay each class on the rgb image
    colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
    for k in range(0,im_labels.shape[2]):
        im[im_labels[:,:,k],0] = colours[k,0]
        im[im_labels[:,:,k],1] = colours[k,1]
        im[im_labels[:,:,k],2] = colours[k,2]

    date_im = timestamps_sorted[i].strftime('%d %b %Y')
    daysnow = (timestamps_sorted[i] - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()

    fig = plt.figure()
    gs = gridspec.GridSpec(2, 2, height_ratios=[1, 20])

    # timeline (2013-2019) with a red marker at the image date
    ax1 = fig.add_subplot(gs[0,:])
    plt.plot(0, 0, 'ko', daysall, 0, 'ko')
    plt.plot([0, daysall], [0, 0], 'k-')
    plt.plot(daysnow, 0, 'ro')
    plt.text(0, 0.05, '2013')
    plt.text(daysall, 0.05, '2019')
    for year in range(2014, 2019):
        tick = (datetime(year,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
        plt.plot(tick, 0, 'ko', markersize=3)
        plt.text(tick, 0.05, str(year))
    plt.axis('off')

    # ax2 = fig.add_subplot(gs[1,0])
    # plt.imshow(im_display)
    # plt.axis('off')
    # plt.title(date_im, fontsize=17, fontweight='bold')

    ax3 = fig.add_subplot(gs[1,:])
    plt.imshow(im)
    for l, contour in enumerate(contours_mwi):
        plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
    plt.axis('off')
    orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
    white_patch = mpatches.Patch(color=[204/255,1,1], label='swash/whitewater')
    blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
    black_line = mlines.Line2D([], [], color='k', linestyle='--', label='shoreline')
    plt.legend(handles=[orange_patch,white_patch,blue_patch,black_line], bbox_to_anchor=(0.6, 0.6))
    plt.title(date_im, fontsize=17, fontweight='bold')

    plt.gcf().set_size_inches(5.34, 9.18)
    plt.gcf().set_tight_layout(True)

    plt.draw()
    # the filename is 'satname_sitename_YYYY-MM-DD...': extract the 10-character date
    date_str = file_names_pan[i][len(satname)+1+len(sitename)+1 : len(satname)+1+len(sitename)+1+10]
    plt.savefig(os.path.join(filepath, 'plots_classif', date_str + '.jpg'), dpi=300)
    plt.close()

# create gif
images = []
filenames = os.listdir(os.path.join(filepath, 'plots_classif'))
with imageio.get_writer(sitename + '.gif', mode='I', duration=0.4) as writer:
    for filename in filenames:
        image = imageio.imread(os.path.join(filepath, 'plots_classif', filename))
        writer.append_data(image)
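The contours extracted in these scripts are in (row, column) pixel coordinates; the georef vector read with data.GetGeoTransform() is what maps them to world coordinates. A minimal sketch of that affine mapping (the helper name is illustrative; the repository's own version is sds.convert_pix2world in functions/sds.py):

import numpy as np

def pix2world_sketch(contour, georef):
    """Illustrative only: georef = (xmin, xres, xrot, ymax, yrot, yres)."""
    rows, cols = contour[:, 0], contour[:, 1]
    x = georef[0] + georef[1]*cols + georef[2]*rows
    y = georef[3] + georef[4]*cols + georef[5]*rows
    return np.column_stack([x, y])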
@ -0,0 +1,229 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Run Neural Network on image to extract sandy pixels
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
from matplotlib import gridspec
from datetime import datetime, timedelta
import pytz
import ee
import pdb
import time
import pandas as pd

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology
from scipy import ndimage
import imageio

# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.2        # threshold for cloud cover
plot_bool = False         # if you want the plots
prob_high = 100           # upper probability to clip and rescale pixel intensity
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356       # GDA94 / MGA Zone 56
buffer_size = 10          # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 50       # number of pixels in a beach (pixel classification)

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
sitename = 'NARRA_all'
#sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'OLDBAR_inlet'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
#sitename = 'DUCK'
#sitename = 'BROULEE'

# load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
daysall = (datetime(2019,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
idx_skipped = []
idx_nocloud = []
n_features = 10
train_pos = np.nan*np.ones((1,n_features))
train_neg = np.nan*np.ones((1,n_features))
columns = ('B','G','R','NIR','SWIR','Pan','WI','VI','BR','mWI','class')

#%%
for i in range(1):
    i = 156  # open (96 close)

    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    nrow = im_pan.shape[0]
    ncol = im_pan.shape[1]

    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)

    # cloud mask from the QA band, resampled to the pan grid (nearest neighbour)
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check for -inf or nan values and add them to the cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)

    # skip the image if cloud cover is above the threshold
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
        idx_skipped.append(i)
        continue
    idx_nocloud.append(i)

    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)

    # classify the image with the neural network (sand/whitewater/water)
    im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)

    # if there are no sand pixels, skip the image (maybe later fall back to the old detection method)
    if sum(sum(im_labels[:,:,0])) == 0:
        print('skip ' + str(i) + ' - no sand')
        idx_skipped.append(i)
        continue

    # map the water line on the classified image
    contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, False)

    im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
    im = np.copy(im_display)
    # define colours for plot (note: swash and water share the same blue here)
    colours = np.array([[1,128/255,0/255],[0,0,204/255],[0,0,204/255]])

    for k in range(0,im_labels.shape[2]):
        im[im_labels[:,:,k],0] = colours[k,0]
        im[im_labels[:,:,k],1] = colours[k,1]
        im[im_labels[:,:,k],2] = colours[k,2]

    date_im = timestamps_sorted[i].strftime('%d %b %Y')
    daysnow = (timestamps_sorted[i] - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()

    fig = plt.figure()
    gs = gridspec.GridSpec(2, 2, height_ratios=[1, 20])

    # timeline (2013-2019) with a red marker at the image date
    ax1 = fig.add_subplot(gs[0,:])
    plt.plot(0, 0, 'ko', daysall, 0, 'ko')
    plt.plot([0, daysall], [0, 0], 'k-')
    plt.plot(daysnow, 0, 'ro')
    plt.text(0, 0.05, '2013')
    plt.text(daysall, 0.05, '2019')
    for year in range(2014, 2019):
        tick = (datetime(year,1,1,tzinfo=pytz.utc) - datetime(2013,1,1,tzinfo=pytz.utc)).total_seconds()
        plt.plot(tick, 0, 'ko', markersize=3)
        plt.text(tick, 0.05, str(year))
    plt.axis('off')

    ax2 = fig.add_subplot(gs[1,0])
    plt.imshow(im_display)
    plt.axis('off')
    plt.title(date_im, fontsize=17, fontweight='bold')

    ax3 = fig.add_subplot(gs[1,1], sharex=ax2, sharey=ax2)
    plt.imshow(im)
    for l, contour in enumerate(contours_mwi):
        plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
    plt.axis('off')
    orange_patch = mpatches.Patch(color=[1,128/255,0/255], label='sand')
    blue_patch = mpatches.Patch(color=[0,0,204/255], label='water')
    black_line = mlines.Line2D([], [], color='k', linestyle='--', label='water line')
    plt.legend(handles=[orange_patch,blue_patch,black_line], bbox_to_anchor=(0.6, 0.6))
    # plt.title(date_im, fontsize=17, fontweight='bold')

    plt.gcf().set_size_inches(11.38, 7.51)
    plt.gcf().set_tight_layout(True)

    plt.draw()

    # plt.savefig(os.path.join(filepath,'plots_classif', file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] + '.jpg'), dpi=300)
    # plt.close()

# create gif
#images = []
#filenames = os.listdir(os.path.join(filepath, 'plots_classif'))
#with imageio.get_writer(sitename + '.gif', mode='I', duration=0.4) as writer:
#    for filename in filenames:
#        image = imageio.imread(os.path.join(filepath, 'plots_classif', filename))
#        writer.append_data(image)
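Two small hedged notes on the scripts above: the cloud-cover fraction computed with nested sums is just the mean of the boolean mask, and the min_contour_points parameter (declared but unused in the neural-network scripts) could be applied by length-filtering the extracted contours:

# equivalent, clearer cloud fraction (fraction of masked pixels)
cloud_cover = np.mean(cloud_mask)

# drop contours with fewer than min_contour_points vertices
contours_mwi = [c for c in contours_mwi if c.shape[0] >= min_contour_points]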
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-

import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee

import matplotlib.dates as mdates
import matplotlib.cm as cm
from datetime import datetime, timedelta
import pickle
import pytz
import scipy.io as sio
import scipy.interpolate as interpolate
import statsmodels.api as sm
import skimage.measure as measure

# my functions
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100

au_tz = pytz.timezone('Australia/Sydney')

# load timestamps from satellite images
satname = 'L8'
sitename = 'OLDBAR'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_output.pkl'), 'rb') as f:
    output = pickle.load(f)
dates_l8 = output['t']
# convert to AEST
dates_l8 = [_.astimezone(au_tz) for _ in dates_l8]
# get the satellite shorelines
sl = output['shorelines']

# load OLDBAR beach points (manually digitized)
with open(os.path.join(os.getcwd(), 'olddata', 'oldbar_beach.pkl'), 'rb') as f:
    narrabeach = pickle.load(f)

dist_thresh = 250    # maximum distance (m) from the digitized beach
frac_smooth = 1./12  # LOWESS smoothing fraction
plt.figure()
plt.axis('equal')
for i in range(1):
    # select the SDS points that are close to the manually digitized points
    idx_beach = [np.min(np.linalg.norm(sl[i][k,:] - narrabeach, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
    plt.plot(sl[i][:,0], sl[i][:,1])
    plt.plot(sl[i][idx_beach,0], sl[i][idx_beach,1])

    # smooth (LOWESS) the satellite shoreline
    sl_smooth = sm.nonparametric.lowess(sl[i][idx_beach,0], sl[i][idx_beach,1], frac=frac_smooth, it=10)
    sl_smooth = sl_smooth[:,[1,0]]

    plt.plot(sl_smooth[:,0], sl_smooth[:,1])

plt.draw()
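The idx_beach list comprehension above computes the distance from every SDS point to every digitized point, which is O(n*m). A cKDTree query returns the same nearest-neighbour distances much faster; a sketch assuming the same sl and narrabeach arrays:

from scipy.spatial import cKDTree

tree = cKDTree(narrabeach)
dist, _ = tree.query(sl[i])   # distance to the nearest digitized point, per SDS point
idx_beach = dist < dist_thresh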
@ -0,0 +1,80 @@
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 19 14:44:57 2018

@author: z5030440

Main code to extract shorelines from Landsat imagery
"""
# Preamble

import ee
import math
import matplotlib.pyplot as plt
import numpy as np
import pdb

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.morphology as morphology
import skimage.measure as measure

# my functions
from functions.utils import *
from functions.sds import *

np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
ee.Initialize()

# parameters
plot_bool = True          # if you want the plots
prob_high = 99.9          # upper probability to clip and rescale pixel intensity
min_contour_points = 100  # minimum number of points contained in each water line

# select collection
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA')

# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.317395,-33.494601],
               [151.388635,-33.495174],
               [151.363624,-33.565184],
               [151.305228,-33.563299],
               [151.317395,-33.494601]]];

# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))

n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')
output = []
# process a single image for now (i = 2); a loop over im_all would go here
# find the image in the ee database
i = 2
im = ee.Image(im_all[i].get('id'))
# load image as np.array
im_pan, im_ms, im_cloud, crs = read_eeimage(im, rect_narra, plot_bool)
# rescale intensities
im_ms = rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
im_pan = rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)
# pansharpen rgb image
im_ms_ps = pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)
# add down-sized bands for NIR and SWIR (since pansharpening is not possible)
im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
# calculate NDWI
im_ndwi = nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], im_cloud, plot_bool)
# edge detection
wl_pix = find_wl_contours(im_ndwi, im_cloud, min_contour_points, True)
# convert from pixels to world coordinates
wl_coords = convert_pix2world(wl_pix, crs['crs_15m'])
output.append(wl_coords)

plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
plt.axis('off')
plt.title('RGB at 15m')
plt.show()
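The wl_coords above are in the image projection (crs['crs_15m']); reprojecting them to an output EPSG, as the next script does with sds.convert_epsg, can be sketched with osgeo.osr. A hedged sketch, not the sds implementation, assuming GDAL 2.x (x, y) axis ordering:

from osgeo import osr
import numpy as np

def convert_epsg_sketch(points, epsg_in, epsg_out):
    """Illustrative only: reproject an (n, 2) array of x/y points."""
    sr_in = osr.SpatialReference()
    sr_in.ImportFromEPSG(epsg_in)
    sr_out = osr.SpatialReference()
    sr_out.ImportFromEPSG(epsg_out)
    tf = osr.CoordinateTransformation(sr_in, sr_out)
    # TransformPoints returns (x, y, z) tuples; keep x and y
    return np.array(tf.TransformPoints(points.tolist()))[:, :2]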
@ -0,0 +1,476 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
#==========================================================#
|
||||
# Extract shorelines from Landsat images
|
||||
#==========================================================#
|
||||
|
||||
# Initial settings
|
||||
import ee
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.cm as cm
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from datetime import datetime
|
||||
import pickle
|
||||
import pdb
|
||||
import pytz
|
||||
from pylab import ginput
|
||||
from osgeo import gdal
|
||||
|
||||
|
||||
|
||||
# image processing modules
|
||||
import skimage.filters as filters
|
||||
import skimage.exposure as exposure
|
||||
import skimage.transform as transform
|
||||
import sklearn.decomposition as decomposition
|
||||
import skimage.morphology as morphology
|
||||
import skimage.measure as measure
|
||||
import skimage.color as color
|
||||
import skimage.feature as feature
|
||||
# machine learning modules
|
||||
from sklearn.cluster import KMeans
|
||||
|
||||
# my modules
|
||||
import functions.utils as utils
|
||||
import functions.sds as sds
|
||||
|
||||
# some settings
|
||||
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
|
||||
plt.rcParams['axes.grid'] = False
|
||||
plt.rcParams['figure.max_open_warning'] = 100
|
||||
ee.Initialize()
|
||||
|
||||
# parameters
|
||||
cloud_thresh = 0.5 # threshold for cloud cover
|
||||
plot_bool = False # if you want the plots
|
||||
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
|
||||
min_contour_points = 100# minimum number of points contained in each water line
|
||||
output_epsg = 28356 # GDA94 / MGA Zone 56
|
||||
buffer_size = 10 # radius (in pixels) of disk for buffer (pixel classification)
|
||||
min_beach_size = 50 # number of pixels in a beach (pixel classification)
|
||||
|
||||
# select collection
|
||||
satname = 'L8'
|
||||
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA') # Landsat 8 Tier 1 TOA
|
||||
|
||||
# location (Narrabeen-Collaroy beach)
|
||||
polygon = [[[151.3473129272461,-33.69035274454718],
|
||||
[151.2820816040039,-33.68206818063878],
|
||||
[151.27281188964844,-33.74775138989556],
|
||||
[151.3425064086914,-33.75231878701767],
|
||||
[151.3473129272461,-33.69035274454718]]];
|
||||
|
||||
# dates
|
||||
start_date = '2013-01-01'
|
||||
end_date = '2018-12-31'
|
||||
|
||||
# filter by location and date
|
||||
flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon)).filterDate(start_date, end_date)
|
||||
|
||||
n_img = flt_col.size().getInfo()
|
||||
print('Number of images covering the polygon:', n_img)
|
||||
im_all = flt_col.getInfo().get('features')
|
||||
|
||||
i = 0 # first image
|
||||
|
||||
# find image in ee database
|
||||
im = ee.Image(im_all[i].get('id'))
|
||||
|
||||
# load image as np.array
|
||||
im_pan, im_ms, im_cloud, crs, meta = sds.read_eeimage(im, polygon, satname, plot_bool)
|
||||
|
||||
# rescale intensities
|
||||
im_ms = sds.rescale_image_intensity(im_ms, im_cloud, prob_high, plot_bool)
|
||||
im_pan = sds.rescale_image_intensity(im_pan, im_cloud, prob_high, plot_bool)
|
||||
|
||||
# pansharpen rgb image
|
||||
im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, im_cloud, plot_bool)
|
||||
|
||||
# add down-sized bands for NIR and SWIR (since pansharpening is not possible)
|
||||
im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
|
||||
|
||||
# calculate NDWI
|
||||
im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], im_cloud, plot_bool)
|
||||
|
||||
# edge detection
|
||||
wl_pix = sds.find_wl_contours(im_ndwi, im_cloud, min_contour_points, plot_bool)
|
||||
|
||||
# convert from pixels to world coordinates
|
||||
wl_coords = sds.convert_pix2world(wl_pix, crs['crs_15m'])
|
||||
|
||||
# convert to output epsg spatial reference
|
||||
wl = sds.convert_epsg(wl_coords, crs['epsg_code'], output_epsg)
|
||||
|
||||
# classify sand pixels
|
||||
im_sand = sds.classify_sand_unsupervised(im_ms_ps, im_pan, im_cloud, wl_pix, buffer_size, min_beach_size, plot_bool)
|
||||
|
||||
#pt_in = np.array(ginput(n=1, timeout=1000))
|
||||
#if pt_in[0][0] < im_ms_ps.shape[1]/2:
|
||||
win = np.ones((3,3))
|
||||
|
||||
im_features = np.zeros((sum(sum(im_sand)), 20))
|
||||
im_features[:,[0,1,2,3,4]] = im_ms_ps[im_sand,:] # B G R NIR SWIR
|
||||
im_features[:,5] = im_pan[im_sand] # Pan
|
||||
im_features[:,6] = im_ndwi[im_sand] # NDWI
|
||||
im_features[:,7] = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,2], im_cloud, False)[im_sand] # NDVI
|
||||
im_features[:,8] = sds.nd_index(im_ms_ps[:,:,0], im_ms_ps[:,:,2], im_cloud, False)[im_sand] # ND Blue - Red
|
||||
for i in range(9):
|
||||
im_features[:,i+9] = ndimage.generic_filter(im_features[:,i], np.std, footprint = win)
|
||||
|
||||
|
||||
im_ms_ps[im_sand,:]
|
||||
|
||||
im_grey = color.rgb2grey(im_ms_ps[:,:,[2,1,0]])
|
||||
plt.figure()
|
||||
plt.imshow(im_grey, cmap='gray')
|
||||
plt.draw()
|
||||
|
||||
counts, bins = np.histogram(im_grey[~im_cloud], bins=255)
|
||||
im_grey_d = np.digitize(im_grey, bins=bins) - 1
|
||||
|
||||
from scipy import ndimage
|
||||
|
||||
varianceMatrix1 = ndimage.generic_filter(im_grey_d, np.max, footprint = np.ones((3,3)))
|
||||
varianceMatrix2 = ndimage.generic_filter(im_grey_d, np.min, footprint = np.ones((3,3)))
|
||||
varianceMatrix = varianceMatrix1 - varianceMatrix2
|
||||
im_grey = color.rgb2grey(im_ms_ps[:,:,[2,1,0]])
|
||||
plt.figure()
|
||||
plt.imshow(varianceMatrix, cmap='gray')
|
||||
plt.draw()

#%%
# GLCM texture properties computed pixel-by-pixel on a test GeoTIFF
# (assumes "from skimage import feature" earlier in the file)
data = gdal.Open('l8_test.tif', gdal.GA_ReadOnly)
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
im = np.stack(bands, 2)

im_test = im[:,:,3]

plt.figure()
plt.imshow(im_test, cmap='gray')
plt.axis('image')
plt.draw()

im_stats = np.zeros((im_test.shape[0], im_test.shape[1], 6))
winsize = 5
prop_names = ('contrast', 'dissimilarity', 'homogeneity', 'energy', 'correlation', 'ASM')
for i in range(im_test.shape[0]):
    print(int(np.round(100*i/im_test.shape[0])))
    for j in range(im_test.shape[1]):
        # window needs to fit completely inside the image
        if i < 2 or j < 2:
            continue
        if i > (im_test.shape[0] - 3) or j > (im_test.shape[1] - 3):
            continue

        # calculate the GLCM on a 5x5 window (winsize = 5)
        glcm_window = im_test[i-2:i+3, j-2:j+3]
        glcm = feature.greycomatrix(glcm_window, [1], [0], symmetric=True, normed=True)

        # compute the six GLCM properties for the centre pixel
        for k in range(6):
            im_stats[i,j,k] = feature.greycoprops(glcm, prop_names[k])

plt.figure()
for i in range(6):
    plt.subplot(2,3,i+1)
    plt.imshow(im_stats[:,:,i], cmap='jet')
    plt.axis('image')
    plt.title(prop_names[i])
plt.draw()

pixel_loc = [200, 200]
im_stats[pixel_loc[0], pixel_loc[1], 3]  # e.g. 'energy' at one pixel
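
# Quick sanity check of the greycomatrix/greycoprops workflow on a toy patch
# (made-up values, not scene data; greycoprops returns a (n_dist, n_angle) array):
#
#     patch = np.array([[0, 0, 1, 1],
#                       [0, 0, 1, 1],
#                       [0, 2, 2, 2],
#                       [2, 2, 3, 3]], dtype=np.uint8)
#     glcm = feature.greycomatrix(patch, [1], [0], levels=4, symmetric=True, normed=True)
#     feature.greycoprops(glcm, 'contrast')  # -> array([[0.583...]]) (= 7/12)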

#%%
# same GLCM experiment on the digitized grey-level image
im_stats = np.zeros((im_grey_d.shape[0], im_grey_d.shape[1], 6))  # re-initialise for this image
for i in range(im_grey_d.shape[0]):
    print(int(np.round(100*i/im_grey_d.shape[0])))
    for j in range(im_grey_d.shape[1]):
        # window needs to fit completely inside the image
        # (second bound checks the number of columns, shape[1])
        if i < 1 or j < 1:
            continue
        if i > (im_grey_d.shape[0] - 2) or j > (im_grey_d.shape[1] - 2):
            continue

        # calculate the GLCM on a 3x3 window
        glcm_window = im_grey_d[i-1:i+2, j-1:j+2]
        glcm = feature.greycomatrix(glcm_window, [1,2], [0], levels=256, symmetric=True, normed=True)

        # compute the six GLCM properties for the centre pixel; with two
        # distances the result is a (2, 1) array, so average over distances
        im_stats[i,j,0] = feature.greycoprops(glcm, 'contrast').mean()
        im_stats[i,j,1] = feature.greycoprops(glcm, 'dissimilarity').mean()
        im_stats[i,j,2] = feature.greycoprops(glcm, 'homogeneity').mean()
        im_stats[i,j,3] = feature.greycoprops(glcm, 'energy').mean()
        im_stats[i,j,4] = feature.greycoprops(glcm, 'correlation').mean()
        im_stats[i,j,5] = feature.greycoprops(glcm, 'ASM').mean()

plt.figure()
for i in range(6):
    plt.subplot(2,3,i+1)
    plt.imshow(im_stats[:,:,i], cmap='jet')
    plt.axis('image')
plt.draw()

#%%
from multiprocessing import Pool
from itertools import product
import time

N = 10000000
pool = Pool()  # defaults to the number of available CPUs
a = np.ones((N))
b = np.ones((N))*2
out = np.zeros((N))

t = time.time()
for i in range(len(a)):
    out[i] = a[i]*b[i]
elapsed = time.time() - t
print(elapsed)

def fun(a, b):
    return a*b

chunksize = 20  # this may take some guessing ... take a look at the docs to decide
# starmap unpacks the (a[i], b[i]) pairs into fun's two arguments; the original
# pool.map(fun, range(N)) passed a single argument and failed. Note that on
# platforms that spawn processes this needs an if __name__ == '__main__' guard.
for ind, res in enumerate(pool.starmap(fun, zip(a, b), chunksize=chunksize)):
    out[ind] = res
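
# For element-wise arithmetic like this, the vectorised NumPy expression is far
# faster than either the loop or the process pool (no pickling / IPC overhead):
#
#     out = a * b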

#%%
from osgeo import gdal, osr  # matches the import style used elsewhere in this repo
import numpy as np
from scipy.interpolate import RectBivariateSpline
from numpy.lib.stride_tricks import as_strided as ast
import dask.array as da
from joblib import Parallel, delayed, cpu_count
import os
from skimage.feature import greycomatrix, greycoprops

def im_resize(im, Nx, Ny):
    '''
    resize array by bivariate spline interpolation
    '''
    ny, nx = np.shape(im)
    xx = np.linspace(0, nx, Nx)
    yy = np.linspace(0, ny, Ny)

    try:
        im = da.from_array(im, chunks=1000)  # dask implementation
    except Exception:
        pass

    newKernel = RectBivariateSpline(np.r_[:ny], np.r_[:nx], im)
    return newKernel(yy, xx)

def p_me(Z, win):
    '''
    calculate the GLCM texture properties (greycoprops) for one window Z;
    note: the win argument is unused, kept for the caller's signature. A pixel
    distance of [5] yields no co-occurring pairs inside a 3x3 window, so with
    win = 3 below a distance of [1] may be more appropriate.
    '''
    try:
        glcm = greycomatrix(Z, [5], [0], 256, symmetric=True, normed=True)
        cont = greycoprops(glcm, 'contrast')
        diss = greycoprops(glcm, 'dissimilarity')
        homo = greycoprops(glcm, 'homogeneity')
        eng = greycoprops(glcm, 'energy')
        corr = greycoprops(glcm, 'correlation')
        ASM = greycoprops(glcm, 'ASM')
        return (cont, diss, homo, eng, corr, ASM)
    except Exception:
        return (0, 0, 0, 0, 0, 0)

def norm_shape(shap):
    '''
    Normalize numpy array shapes so they're always expressed as a tuple,
    even for one-dimensional shapes.
    '''
    try:
        i = int(shap)
        return (i,)
    except TypeError:
        # shape was not a number
        pass

    try:
        t = tuple(shap)
        return t
    except TypeError:
        # shape was not iterable
        pass

    raise TypeError('shape must be an int, or a tuple of ints')
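
# Expected behaviour of norm_shape (illustrative only):
#
#     norm_shape(3)       # -> (3,)
#     norm_shape((2, 4))  # -> (2, 4)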

def sliding_window(a, ws, ss=None, flatten=True):
    '''
    Source: http://www.johnvinyard.com/blog/?p=268#more-268
    Parameters:
        a  - an n-dimensional numpy array
        ws - an int (a is 1D) or tuple (a is 2D or greater) representing the size
             of each dimension of the window
        ss - an int (a is 1D) or tuple (a is 2D or greater) representing the
             amount to slide the window in each dimension. If not specified, it
             defaults to ws.
        flatten - if True, all slices are flattened, otherwise, there is an
             extra dimension for each dimension of the input.

    Returns
        an array containing each n-dimensional window from a
    '''
    if ss is None:
        # ss was not provided. the windows will not overlap in any direction.
        ss = ws
    ws = norm_shape(ws)
    ss = norm_shape(ss)
    # convert ws, ss, and a.shape to numpy arrays
    ws = np.array(ws)
    ss = np.array(ss)
    shap = np.array(a.shape)
    # ensure that ws, ss, and a.shape all have the same number of dimensions
    ls = [len(shap), len(ws), len(ss)]
    if 1 != len(set(ls)):
        raise ValueError(
            'a.shape, ws and ss must all have the same length. They were %s' % str(ls))

    # ensure that ws is smaller than a in every dimension
    if np.any(ws > shap):
        raise ValueError(
            'ws cannot be larger than a in any dimension. '
            'a.shape was %s and ws was %s' % (str(a.shape), str(ws)))

    # how many slices will there be in each dimension?
    newshape = norm_shape(((shap - ws) // ss) + 1)

    # the shape of the strided array will be the number of slices in each dimension
    # plus the shape of the window (tuple addition)
    newshape += norm_shape(ws)

    # the strides tuple will be the array's strides multiplied by step size, plus
    # the array's strides (tuple addition)
    newstrides = norm_shape(np.array(a.strides) * ss) + a.strides
    a = ast(a, shape=newshape, strides=newstrides)
    if not flatten:
        return a
    # Collapse strided so that it has one more dimension than the window. I.e.,
    # the new array is a flat list of slices.
    meat = len(ws) if ws.shape else 0
    firstdim = (np.prod(newshape[:-meat]),) if ws.shape else ()
    dim = firstdim + (newshape[-meat:])
    # remove any dimensions with size 1 (filter() returns an iterator in
    # Python 3, so materialise the result before reshaping)
    dim = [d for d in dim if d != 1]
    return a.reshape(dim), newshape
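
# Shape check (illustrative): a 4x4 array with 3x3 windows and step (1, 1)
# gives 2x2 window positions, i.e. 4 flattened windows of shape (3, 3):
#
#     Z, ind = sliding_window(np.arange(16).reshape(4, 4), (3, 3), (1, 1))
#     Z.shape  # -> (4, 3, 3)
#     ind      # -> (2, 2, 3, 3)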
# Stuff to change
win = 3
meter = str(win/4)
merge = im_grey_d
Z, ind = sliding_window(merge, (win,win), (1,1))

Ny, Nx = np.shape(merge)

# range (not the Python 2 xrange), and p_me takes the window size as 2nd argument
w = Parallel(n_jobs=cpu_count(), verbose=0)(delayed(p_me)(Z[k], win) for k in range(len(Z)))

cont = [a[0] for a in w]
diss = [a[1] for a in w]
homo = [a[2] for a in w]
eng = [a[3] for a in w]
corr = [a[4] for a in w]
ASM = [a[5] for a in w]

# Reshape to match number of windows
plt_cont = np.reshape(cont, (ind[0], ind[1]))
plt_diss = np.reshape(diss, (ind[0], ind[1]))
plt_homo = np.reshape(homo, (ind[0], ind[1]))
plt_eng = np.reshape(eng, (ind[0], ind[1]))
plt_corr = np.reshape(corr, (ind[0], ind[1]))
plt_ASM = np.reshape(ASM, (ind[0], ind[1]))
del cont, diss, homo, eng, corr, ASM

# Resize images to receive texture and define filenames
contrast = im_resize(plt_cont, Nx, Ny)
contrast[merge==0] = np.nan
dissimilarity = im_resize(plt_diss, Nx, Ny)
dissimilarity[merge==0] = np.nan
homogeneity = im_resize(plt_homo, Nx, Ny)
homogeneity[merge==0] = np.nan
energy = im_resize(plt_eng, Nx, Ny)
energy[merge==0] = np.nan
correlation = im_resize(plt_corr, Nx, Ny)
correlation[merge==0] = np.nan
ASM = im_resize(plt_ASM, Nx, Ny)
ASM[merge==0] = np.nan

#%%
plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
for i, contour in enumerate(wl_pix):
    plt.plot(contour[:, 1], contour[:, 0], linewidth=2)
plt.axis('image')
plt.title('Detected water lines')
plt.show()

vec = im_ms_ps.reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1], im_ms_ps.shape[2])
vec_pan = im_pan.reshape(im_pan.shape[0]*im_pan.shape[1])
features = np.zeros((len(vec), 5))
features[:,[0,1,2,3]] = vec[:,[0,1,2,3]]
features[:,4] = vec_pan
vec_mask = im_cloud.reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1])

# create buffer: mark the detected water-line pixels ...
im_buffer = np.zeros((im_ms_ps.shape[0], im_ms_ps.shape[1]))
for i, contour in enumerate(wl_pix):
    indices = [(int(_[0]), int(_[1])) for _ in list(np.round(contour))]
    for j, idx in enumerate(indices):
        im_buffer[idx] = 1

plt.figure()
plt.imshow(im_buffer)
plt.draw()

# ... then dilate them with a disk of radius buffer_size (skimage.morphology)
se = morphology.disk(buffer_size)
im_buffer = morphology.binary_dilation(im_buffer, se)

plt.figure()
plt.imshow(im_buffer)
plt.draw()

vec_buffer = (im_buffer == 1).reshape(im_ms_ps.shape[0] * im_ms_ps.shape[1])

# exclude cloudy pixels from the buffer
vec_buffer = np.logical_and(vec_buffer, ~vec_mask)

#vec_buffer = np.ravel_multi_index(z,(im_ms_ps.shape[0], im_ms_ps.shape[1]))

# cluster the (unmasked) buffer pixels into 6 classes
# (assumes "from sklearn.cluster import KMeans" earlier in the file)
kmeans = KMeans(n_clusters=6, random_state=0).fit(vec[vec_buffer,:])
labels = kmeans.labels_
labels_full = np.ones((len(vec_mask))) * np.nan
labels_full[vec_buffer] = labels
im_labels = labels_full.reshape(im_ms_ps.shape[0], im_ms_ps.shape[1])

plt.figure()
plt.imshow(im_labels)
plt.axis('equal')
plt.draw()

utils.compare_images(im_labels, im_pan)
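
# Toy illustration of the cluster-selection rule used below: "sand" is taken as
# the cluster whose centroid is brightest on average in B, G, R and SWIR
# (columns 0, 1, 2 and 4 of im_ms_ps). With made-up centroids:
#
#     centers = np.array([[0.2, 0.2, 0.2, 0.5, 0.1],   # dark, water-like
#                         [0.6, 0.6, 0.6, 0.4, 0.7]])  # bright, sand-like
#     np.argmax(np.mean(centers[:, [0, 1, 2, 4]], axis=1))  # -> 1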

plt.figure()
for i in range(6):
    plt.plot(kmeans.cluster_centers_[i,:])
plt.draw()

# select the cluster with the brightest mean B, G, R and SWIR signature as sand
im_sand = im_labels == np.argmax(np.mean(kmeans.cluster_centers_[:,[0,1,2,4]], axis=1))

im_sand2 = morphology.remove_small_objects(im_sand, min_size=min_beach_size, connectivity=2)
im_sand3 = morphology.binary_dilation(im_sand2, morphology.disk(1))
plt.figure()
plt.imshow(im_sand3)
plt.draw()

# paint the sand pixels so they stand out in the RGB display below
# (bands are B, G, R, so the [2,1,0] view shows them in red)
im_ms_ps[im_sand3,0] = 0
im_ms_ps[im_sand3,1] = 0
im_ms_ps[im_sand3,2] = 1

plt.figure()
plt.imshow(im_ms_ps[:,:,[2,1,0]])
plt.axis('image')
plt.title('Sand classification')
plt.show()

#%%
@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-

# generate the expected L7 acquisition dates (16-day repeat cycle) for Narrabeen

from datetime import datetime, timedelta
import pytz
import csv
import pandas as pd

au_tz = pytz.timezone('Australia/Sydney')

# note: pytz recommends au_tz.localize(datetime(...)) over tzinfo=; for
# date-only arithmetic like this the difference does not matter
dt1 = datetime(2018, 4, 17, tzinfo=au_tz)

dt = []
dt.append(dt1)
for i in range(1, 100):
    dt1 = dt[i-1]
    dt.append(dt1 + timedelta(days=16))

dtstr = [_.strftime('%d %b %Y') for _ in dt]

df = pd.DataFrame(dtstr)
df.to_csv('L7_NARRA_dates.csv', index=False, header=False)
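
# The same sequence in a single pandas call (equivalent output):
#
#     pd.date_range('2018-04-17', periods=100, freq='16D').strftime('%d %b %Y')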
Binary file not shown.
@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Process shorelines (clipping and smoothing)
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee

import matplotlib.dates as mdates
import matplotlib.cm as cm
import matplotlib.colors as mcolor
from datetime import datetime, timedelta
import pickle
import pytz
import scipy.io as sio
import scipy.interpolate as interpolate
import statsmodels.api as sm
import skimage.measure as measure
import simplekml

# my functions
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100

au_tz = pytz.timezone('Australia/Sydney')
au_epsg = 28356
# load the satellite-derived shorelines
satname = 'L8'
sitename = 'OLDBAR'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'rb') as f:
    output = pickle.load(f)

sl = output['shorelines']
dates_sl = output['t']
# convert to AEST
dates_sl = [_.astimezone(au_tz) for _ in dates_sl]

# load the reference shoreline points
with open(os.path.join(os.getcwd(), 'data', satname, sitename, sitename + '_refpoints.pkl'), 'rb') as f:
    refpoints = pickle.load(f)

dist_thresh = 200
frac_smooth = 1./15
plt.figure()
plt.axis('equal')
cmap = cm.get_cmap('brg')
colours = cmap(np.linspace(0, 1, num=len(sl)))
kml = simplekml.Kml()
for i in range(len(sl)):
    # select points of SDS that are close to the manually digitized points
    idx_ref = [np.min(np.linalg.norm(sl[i][k,:] - refpoints, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
    # smooth (LOWESS) satellite shoreline; lowess returns the points sorted as
    # (exog, endog) columns, so swap back to (x, y) afterwards
    sl_smooth = sm.nonparametric.lowess(sl[i][idx_ref,0], sl[i][idx_ref,1], frac=frac_smooth, it=10)
    sl_smooth = sl_smooth[:,[1,0]]
    # sl_smooth = sl[i][idx_ref,:]

    # plt.plot(sl[i][idx_ref,0],sl[i][idx_ref,1], 'k-')
    plt.plot(sl_smooth[:,0], sl_smooth[:,1], color=colours[i,:], label=dates_sl[i].strftime('%d-%b-%Y'))

    # convert to wgs84 (epsg = 4326)
    sl_wgs84 = sds.convert_epsg(sl_smooth, 28356, 4326)
    # save in kml file
    ln = kml.newlinestring(name=dates_sl[i].strftime('%d-%b-%Y'))
    ln.coords = sl_wgs84
    ln.style.labelstyle.color = mcolor.rgb2hex(colours[i,:3])
    ln.style.linestyle.color = mcolor.rgb2hex(colours[i,:3])

plt.legend(ncol=3)
plt.xlabel('Eastings [m]')
plt.ylabel('Northings [m]')
plt.title('Oldbar inlet (South)')
plt.draw()

kml.save(satname + sitename + '_shorelines.kml')
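
# For reference, a convert_epsg-style reprojection can be written with osgeo.osr
# (a sketch under assumed behaviour, not the actual functions.sds implementation;
# axis order may need extra care with GDAL 3+):
#
#     from osgeo import osr
#     def convert_epsg_sketch(points, epsg_in, epsg_out):
#         src = osr.SpatialReference(); src.ImportFromEPSG(epsg_in)
#         dst = osr.SpatialReference(); dst.ImportFromEPSG(epsg_out)
#         tr = osr.CoordinateTransformation(src, dst)
#         return np.array(tr.TransformPoints(points.tolist()))[:, :2]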
@ -0,0 +1,213 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Extract shorelines from Landsat images
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import ee
import pdb

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure

# import own modules
import functions.utils as utils
import functions.sds_old1 as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.5        # threshold for cloud cover
plot_bool = False         # if you want the plots
prob_high = 99.9          # upper probability to clip and rescale pixel intensity
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356       # GDA94 / MGA Zone 56
buffer_size = 10          # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 50       # number of pixels in a beach (pixel classification)

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
sitename = 'NARRA'
#sitename = 'OLDBAR'

# Load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
with open(os.path.join(filepath, sitename + '_accuracy_georef' + '.pkl'), 'rb') as f:
    acc_georef = pickle.load(f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
    input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints' + '.pkl'), 'rb') as f:
    refpoints = pickle.load(f)
# sort timestamps and georef accuracy (downloaded images are sorted by date in directory)
timestamps_sorted = sorted(timestamps)
idx_sorted = sorted(range(len(timestamps)), key=timestamps.__getitem__)
acc_georef_sorted = [acc_georef[j] for j in idx_sorted]

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
cloud_cover_ts = []
date_acquired_ts = []
acc_georef_ts = []
idx_skipped = []
idx_nocloud = []
t = []
shorelines = []

for i in range(N):
    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    # band index must follow the inner loop variable k (i is the image index)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)
    # cloud mask
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')
    # check if -inf or nan values and add to cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
    # cloud cover as the fraction of masked pixels (same as cloud_mask.mean())
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skip ' + str(i) + ' - cloudy (' + str(cloud_cover) + ')')
        idx_skipped.append(i)
        continue
    idx_nocloud.append(i)
    # check if an image for that date is already present
    # (the slice below extracts the 10-character acquisition date from file
    # names assumed to look like 'L8_NARRA_YYYY-MM-DD...')
    if file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] in date_acquired_ts:
        # find the index of the image that is repeated
        idx_samedate = utils.find_indices(date_acquired_ts, lambda e: e == file_names_pan[i][9:19])
        idx_samedate = idx_samedate[0]
        print('cloud cover ' + str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
        print('acc georef ' + str(acc_georef_sorted[i]) + ' - ' + str(acc_georef_ts[idx_samedate]))
        # keep the image with less cloud cover or better georeferencing accuracy
        if cloud_cover < cloud_cover_ts[idx_samedate] - 0.01:
            skip = False
        elif acc_georef_sorted[i] < acc_georef_ts[idx_samedate]:
            skip = False
        else:
            skip = True
        if skip:
            print('skip ' + str(i) + ' - repeated')
            idx_skipped.append(i)
            continue
        else:
            del shorelines[idx_samedate]
            del t[idx_samedate]
            del cloud_cover_ts[idx_samedate]
            del date_acquired_ts[idx_samedate]
            del acc_georef_ts[idx_samedate]
            print('keep ' + str(i) + ' - deleted ' + str(idx_samedate))

    # rescale intensities
    im_ms = sds.rescale_image_intensity(im_ms, cloud_mask, prob_high, plot_bool)
    im_pan = sds.rescale_image_intensity(im_pan, cloud_mask, prob_high, plot_bool)
    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, True)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
    # calculate NDWI
    im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
    # detect edges
    wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, True)
    # convert from pixels to world coordinates
    wl_coords = sds.convert_pix2world(wl_pix, georef)
    # convert to output epsg spatial reference
    wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)
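    # For reference: a GDAL geotransform gt maps pixel (col, row) to world
    # coordinates via x = gt[0] + col*gt[1] + row*gt[2] and
    # y = gt[3] + col*gt[4] + row*gt[5]; sds.convert_pix2world is assumed to
    # apply this affine transform to each contour.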

    # classify sand pixels
#    im_sand = sds.classify_sand_unsupervised(im_ms_ps, im_pan, cloud_mask, wl_pix, False, min_beach_size, True)

    # plot a figure to select the correct water line and discard cloudy images
    plt.figure()
    cmap = cm.get_cmap('jet')
    plt.subplot(121)
    plt.imshow(im_ms_ps[:,:,[2,1,0]])
    for j, contour in enumerate(wl_pix):
        colours = cmap(np.linspace(0, 1, num=len(wl_pix)))
        plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color=colours[j,:])
    plt.axis('image')
    plt.title(file_names_pan[i])
    plt.subplot(122)
    centroids = []
    for j, contour in enumerate(wl):
        colours = cmap(np.linspace(0, 1, num=len(wl)))
        centroids.append([np.mean(contour[:, 0]), np.mean(contour[:, 1])])
        plt.plot(contour[:, 0], contour[:, 1], linewidth=2, color=colours[j,:])
        plt.plot(np.mean(contour[:, 0]), np.mean(contour[:, 1]), 'o', color=colours[j,:])
    plt.plot(refpoints[:,0], refpoints[:,1], 'k.')
    plt.axis('equal')
    plt.title(file_names_pan[i])
    mng = plt.get_current_fig_manager()
    mng.window.showMaximized()
    plt.tight_layout()
    plt.draw()
    # click on the left image to discard, otherwise on the closest centroid in the right image
    # (clicks on the left panel are in pixel coordinates, well below 10000,
    # while the right panel shows eastings in metres, well above it)
    pt_in = np.array(ginput(n=1, timeout=1000))
    if pt_in[0][0] < 10000:
        print('skip ' + str(i) + ' - manual')
        idx_skipped.append(i)
        continue
    # get the contour that was selected (click closest to its centroid)
    dist_centroid = [np.linalg.norm(_ - pt_in) for _ in centroids]
    shorelines.append(wl[np.argmin(dist_centroid)])

    t.append(timestamps_sorted[i])
    cloud_cover_ts.append(cloud_cover)
    acc_georef_ts.append(acc_georef_sorted[i])
    date_acquired_ts.append(file_names_pan[i][9:19])


#plt.figure()
#plt.axis('equal')
#for j in range(len(shorelines)):
#    plt.plot(shorelines[j][:,0], shorelines[j][:,1])
#plt.draw()

output = {'t': t, 'shorelines': shorelines, 'cloud_cover': cloud_cover_ts, 'acc_georef': acc_georef_ts}

#with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'wb') as f:
#    pickle.dump(output, f)
#
#with open(os.path.join(filepath, sitename + '_skipped' + '.pkl'), 'wb') as f:
#    pickle.dump(idx_skipped, f)
#
#with open(os.path.join(filepath, sitename + '_idxnocloud' + '.pkl'), 'wb') as f:
#    pickle.dump(idx_nocloud, f)
@ -0,0 +1,265 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Extract shorelines from Landsat images
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import ee
import pdb

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
from shapely.geometry import LineString

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology

# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.2        # threshold for cloud cover
plot_bool = False         # if you want the plots
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356       # GDA94 / MGA Zone 56
buffer_size = 7           # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 20       # number of pixels in a beach (pixel classification)
dist_ref = 100            # maximum distance (m) from the reference points
min_length_wl = 300       # minimum length (m) of a shoreline contour
manual = False

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
sitename = 'NARRA'
#sitename = 'OLDBAR'

# Load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
with open(os.path.join(filepath, sitename + '_accuracy_georef' + '.pkl'), 'rb') as f:
    acc_georef = pickle.load(f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
    input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints' + '.pkl'), 'rb') as f:
    refpoints = pickle.load(f)
# reuse a saved skip list if there is one, otherwise fall back to manual screening
try:
    with open(os.path.join(filepath, sitename + '_skipped_new' + '.pkl'), 'rb') as f:
        idx_skipped = pickle.load(f)
except FileNotFoundError:
    idx_skipped = []
    manual = True

# sort timestamps and georef accuracy (downloaded images are sorted by date in directory)
timestamps_sorted = sorted(timestamps)
idx_sorted = sorted(range(len(timestamps)), key=timestamps.__getitem__)
acc_georef_sorted = [acc_georef[j] for j in idx_sorted]

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
cloud_cover_ts = []
date_acquired_ts = []
acc_georef_ts = []
t = []
shorelines = []

#%%
for i in range(N):
    # "not", not "~": ~ is bitwise inversion (~False == -1, which is truthy),
    # so the original "if ~manual" branch ran unconditionally
    if not manual:
        if i in idx_skipped:
            continue

    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    nrows = im_pan.shape[0]
    ncols = im_pan.shape[1]

    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)

    # cloud mask
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check if -inf or nan values and add to cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)

    # calculate cloud cover and skip the image if too high
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if manual:
        if cloud_cover > cloud_thresh:
            print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
            idx_skipped.append(i)
            continue

    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # rescale pansharpened RGB for visualisation
    im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)

    # classify image in 4 classes (sand, whitewater, water, other) with NN classifier
    im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)

#    # manually validate classification
#    pt_in = np.array(ginput(n=1, timeout=1000))
#    if pt_in[0][1] > nrows/2:
#        print('skip ' + str(i) + ' - wrong classification')
#        idx_skipped.append(i)
#        continue

    # if there are no sand pixels, skip the image (maybe later change the detection method to the old method)
    if manual:
        if sum(sum(im_labels[:,:,0])) == 0:
            print('skip ' + str(i) + ' - no sand')
            idx_skipped.append(i)
            continue

    # extract shorelines (new method)
    contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, plot_bool)

    plt.figure()
    im = np.copy(im_display)
    # define colours for the labelled classes
    colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
    for k in range(0, im_labels.shape[2]):
        im[im_labels[:,:,k],0] = colours[k,0]
        im[im_labels[:,:,k],1] = colours[k,1]
        im[im_labels[:,:,k],2] = colours[k,2]
    plt.imshow(im)
    for k, contour in enumerate(contours_mwi):
        plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
    mng = plt.get_current_fig_manager()
    mng.window.showMaximized()
    plt.tight_layout()
    plt.draw()

    # manually validate detection (click in the bottom half of the image to skip)
    if manual:
        pt_in = np.array(ginput(n=1, timeout=1000))
        plt.close()
        if pt_in[0][1] > nrows/2:
            print('skip ' + str(i) + ' - wrong detection')
            idx_skipped.append(i)
            continue
    else:
        plt.close()

    # remove contour points that are around clouds (nan values)
    for k, contour in enumerate(contours_mwi):
        if np.any(np.isnan(contour)):
            index_nan = np.where(np.isnan(contour))[0]
            # write the cleaned contour back into the list (np.delete returns a copy)
            contours_mwi[k] = np.delete(contour, index_nan, axis=0)

    # convert from pixels to world coordinates
    wl_coords = sds.convert_pix2world(contours_mwi, georef)
    # convert to output epsg spatial reference
    wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)

    # discard contours whose perimeter is shorter than min_length_wl, as they are usually not shoreline
    wl_good = []
    for l, wls in enumerate(wl):
        coords = [(wls[k,0], wls[k,1]) for k in range(len(wls))]
        a = LineString(coords)  # shapely LineString structure
        if a.length >= min_length_wl:
            wl_good.append(wls)
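
    # shapely's .length is the euclidean line length in the coordinate units
    # (metres here), e.g. LineString([(0, 0), (3, 4)]).length -> 5.0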

    # pre-process points (list of arrays to a single array of points)
    x_points = np.array([])
    y_points = np.array([])
    for k in range(len(wl_good)):
        x_points = np.append(x_points, wl_good[k][:,0])
        y_points = np.append(y_points, wl_good[k][:,1])
    wl_good = np.transpose(np.array([x_points, y_points]))

    # only keep points around Narrabeen beach (close to the given refpoints)
    temp = np.zeros((len(wl_good))).astype(bool)
    for k in range(len(refpoints)):
        temp = np.logical_or(np.linalg.norm(wl_good - refpoints[k,[0,1]], axis=1) < dist_ref, temp)
    wl_final = wl_good[temp]
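
    # A faster equivalent of the reference-point filter above for large point
    # sets (a sketch using scipy's cKDTree; same result as the loop):
    #
    #     from scipy.spatial import cKDTree
    #     dist, _ = cKDTree(refpoints[:, :2]).query(wl_good)
    #     wl_final = wl_good[dist < dist_ref]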

#    plt.figure()
#    plt.axis('equal')
#    plt.plot(wl_final[:,0], wl_final[:,1], 'k.')
#    plt.draw()

    # save data
    shorelines.append(wl_final)
    t.append(timestamps_sorted[i])
    cloud_cover_ts.append(cloud_cover)
    acc_georef_ts.append(acc_georef_sorted[i])
    date_acquired_ts.append(file_names_pan[i][9:19])


output = {'t': t, 'shorelines': shorelines, 'cloud_cover': cloud_cover_ts, 'acc_georef': acc_georef_ts}


#with open(os.path.join(filepath, sitename + '_output_new' + '.pkl'), 'wb') as f:
#    pickle.dump(output, f)
#
#with open(os.path.join(filepath, sitename + '_skipped_new' + '.pkl'), 'wb') as f:
#    pickle.dump(idx_skipped, f)

# plt.figure()
# plt.axis('equal')
# plt.plot(refpoints[:,0], refpoints[:,1], 'ko')
# plt.plot(all_points[temp,0], all_points[temp,1], 'go')
# plt.plot(all_points[~temp,0], all_points[~temp,1], 'ro')
# plt.draw()

# extract shorelines (old method)
# im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
# wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)

# plt.figure()
# plt.imshow(im_display)
# for k,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
# for k,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linestyle='--', linewidth=1, color='w')
# plt.draw()
@ -0,0 +1,284 @@
# -*- coding: utf-8 -*-

#==========================================================#
# Extract shorelines from Landsat images
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import ee
import pdb

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
from shapely.geometry import LineString

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
import skimage.morphology as morphology

# machine learning modules
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler, Normalizer
from sklearn.externals import joblib

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()

# parameters
cloud_thresh = 0.5        # threshold for cloud cover
plot_bool = False         # if you want the plots
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356       # GDA94 / MGA Zone 56
buffer_size = 7           # radius (in pixels) of disk for buffer (pixel classification)
min_beach_size = 20       # number of pixels in a beach (pixel classification)
dist_ref = 100            # maximum distance (m) from the reference points
min_length_wl = 300       # minimum length (m) of a shoreline contour

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
sitename = 'NARRA'
#sitename = 'OLDBAR'

# Load metadata
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
with open(os.path.join(filepath, sitename + '_accuracy_georef' + '.pkl'), 'rb') as f:
    acc_georef = pickle.load(f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
    input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints2' + '.pkl'), 'rb') as f:
    refpoints = pickle.load(f)
# sort timestamps and georef accuracy (downloaded images are sorted by date in directory)
timestamps_sorted = sorted(timestamps)
idx_sorted = sorted(range(len(timestamps)), key=timestamps.__getitem__)
acc_georef_sorted = [acc_georef[j] for j in idx_sorted]

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)

# initialise some variables
cloud_cover_ts = []
date_acquired_ts = []
acc_georef_ts = []
idx_skipped = []
idx_nocloud = []
t = []
shorelines = []

#%%
for i in range(N):

    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    # band index must follow the inner loop variable k (i is the image index)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    nrows = im_pan.shape[0]
    ncols = im_pan.shape[1]

    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)

    # cloud mask
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the image using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check if -inf or nan values and add to cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)

    # calculate cloud cover and skip the image if too high
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skip ' + str(i) + ' - cloudy (' + str(np.round(cloud_cover*100).astype(int)) + '%)')
        idx_skipped.append(i)
        continue
    idx_nocloud.append(i)

    # pansharpen rgb image
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # rescale pansharpened RGB for visualisation
    im_display = sds.rescale_image_intensity(im_ms_ps[:,:,[2,1,0]], cloud_mask, 100, False)
    # add down-sized bands for NIR and SWIR (since pansharpening is not possible)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)

    # classify image in 4 classes (sand, whitewater, water, other) with NN classifier
    im_classif, im_labels = sds.classify_image_NN(im_ms_ps, im_pan, cloud_mask, min_beach_size, plot_bool)

#    # manually validate classification
#    pt_in = np.array(ginput(n=1, timeout=1000))
#    if pt_in[0][1] > nrows/2:
#        print('skip ' + str(i) + ' - wrong classification')
#        idx_skipped.append(i)
#        continue

    # if there are no sand pixels, skip the image (maybe later change the detection method to the old method)
    if sum(sum(im_labels[:,:,0])) == 0:
        print('skip ' + str(i) + ' - no sand')
        idx_skipped.append(i)
        continue

    # extract shorelines (new method)
    contours_wi, contours_mwi = sds.find_wl_contours2(im_ms_ps, im_labels, cloud_mask, buffer_size, plot_bool)

    plt.figure()
    im = np.copy(im_display)
    # define colours for the labelled classes
    colours = np.array([[1,128/255,0/255],[204/255,1,1],[0,0,204/255]])
    for k in range(0, im_labels.shape[2]):
        im[im_labels[:,:,k],0] = colours[k,0]
        im[im_labels[:,:,k],1] = colours[k,1]
        im[im_labels[:,:,k],2] = colours[k,2]
    plt.imshow(im)
    for k, contour in enumerate(contours_mwi):
        plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color='k', linestyle='--')
    mng = plt.get_current_fig_manager()
    mng.window.showMaximized()
    plt.tight_layout()
    plt.draw()

    # manually validate detection (click in the bottom half of the image to skip)
    pt_in = np.array(ginput(n=1, timeout=1000))
    if pt_in[0][1] > nrows/2:
        print('skip ' + str(i) + ' - wrong detection')
        idx_skipped.append(i)
        continue

    # remove contour points that are around clouds (nan values)
    for k, contour in enumerate(contours_mwi):
        if np.any(np.isnan(contour)):
            index_nan = np.where(np.isnan(contour))[0]
            # write the cleaned contour back into the list (np.delete returns a copy)
            contours_mwi[k] = np.delete(contour, index_nan, axis=0)

    # convert from pixels to world coordinates
    wl_coords = sds.convert_pix2world(contours_mwi, georef)
    # convert to output epsg spatial reference
    wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)

    # discard contours whose perimeter is shorter than min_length_wl, as they are usually not shoreline
    wl_good = []
    for l, wls in enumerate(wl):
        coords = [(wls[k,0], wls[k,1]) for k in range(len(wls))]
        a = LineString(coords)  # shapely LineString structure
        if a.length >= min_length_wl:
            wl_good.append(wls)

    # pre-process points (list of arrays to a single array of points)
    x_points = np.array([])
    y_points = np.array([])
    for k in range(len(wl_good)):
        x_points = np.append(x_points, wl_good[k][:,0])
        y_points = np.append(y_points, wl_good[k][:,1])
    wl_good = np.transpose(np.array([x_points, y_points]))

    # only keep points around Narrabeen beach (close to the given refpoints)
    temp = np.zeros((len(wl_good))).astype(bool)
    for k in range(len(refpoints)):
        temp = np.logical_or(np.linalg.norm(wl_good - refpoints[k,[0,1]], axis=1) < dist_ref, temp)
    wl_final = wl_good[temp]

    plt.figure()
    plt.axis('equal')
    plt.plot(wl_final[:,0], wl_final[:,1], 'k.')
    plt.draw()

    # check if an image for that date already exists and keep the best one
    # in terms of cloud cover and georeferencing accuracy
    if file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] in date_acquired_ts:

        # find the index of the image that is repeated
        idx_samedate = utils.find_indices(date_acquired_ts, lambda e: e == file_names_pan[i][9:19])
        idx_samedate = idx_samedate[0]
#        print('cloud cover ' + str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
#        print('acc georef ' + str(acc_georef_sorted[i]) + ' - ' + str(acc_georef_ts[idx_samedate]))

        # keep the image with less cloud cover or better georeferencing accuracy
        # (the 0.01 margin avoids swapping images over negligible cloud differences)
        if cloud_cover < cloud_cover_ts[idx_samedate] - 0.01:
            skip = False
        elif acc_georef_sorted[i] < acc_georef_ts[idx_samedate]:
            skip = False
        else:
            skip = True

        if skip:
            print('skip ' + str(i) + ' - repeated')
            idx_skipped.append(i)
            continue
        else:
            del shorelines[idx_samedate]
            del t[idx_samedate]
            del cloud_cover_ts[idx_samedate]
            del date_acquired_ts[idx_samedate]
            del acc_georef_ts[idx_samedate]
            print('keep ' + str(i) + ' - deleted ' + str(idx_samedate))

    # save data
    shorelines.append(wl_final)
    t.append(timestamps_sorted[i])
    cloud_cover_ts.append(cloud_cover)
    acc_georef_ts.append(acc_georef_sorted[i])
    date_acquired_ts.append(file_names_pan[i][9:19])

output = {'t': t, 'shorelines': shorelines, 'cloud_cover': cloud_cover_ts, 'acc_georef': acc_georef_ts}

#with open(os.path.join(filepath, sitename + '_output2' + '.pkl'), 'wb') as f:
#    pickle.dump(output, f)
#
#with open(os.path.join(filepath, sitename + '_skipped2' + '.pkl'), 'wb') as f:
#    pickle.dump(idx_skipped, f)
#
#with open(os.path.join(filepath, sitename + '_idxnocloud2' + '.pkl'), 'wb') as f:
#    pickle.dump(idx_nocloud, f)

# plt.figure()
# plt.axis('equal')
# plt.plot(refpoints[:,0], refpoints[:,1], 'ko')
# plt.plot(all_points[temp,0], all_points[temp,1], 'go')
# plt.plot(all_points[~temp,0], all_points[~temp,1], 'ro')
# plt.draw()

# extract shorelines (old method)
# im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
# wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)

# plt.figure()
# plt.imshow(im_display)
# for i,contour in enumerate(contours_mwi): plt.plot(contour[:, 1], contour[:, 0], linewidth=3, color='k')
# for i,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linestyle='--', linewidth=1, color='w')
# plt.draw()