# -*- coding: utf-8 -*-

#==========================================================#
# Make a gif of the satellite images
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import matplotlib.animation as manimation
import ee
import pdb

# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
import imageio

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure

# import own modules
import functions.utils as utils
import functions.sds as sds

# some settings
np.seterr(all='ignore')  # ignore warnings for divisions by 0 and NaNs
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()
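
# Note: ee.Initialize() assumes that Google Earth Engine credentials are already
# configured on this machine (e.g. via the `earthengine authenticate` command-line
# tool); otherwise it fails with an authentication error.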

# parameters
cloud_thresh = 0.5        # threshold on the cloud cover fraction above which an image is skipped
plot_bool = False         # if True, show the intermediate plots
prob_high = 99.9          # upper percentile used to clip and rescale the pixel intensities
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356       # GDA94 / MGA Zone 56

# load metadata (timestamps and epsg code) for the collection
satname = 'L8'
#sitename = 'NARRA'
sitename = 'OLDBAR_inlet'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)  # sort the timestamps, as the images are sorted in the directory
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
    input_epsg = pickle.load(f)
with open(os.path.join(filepath, sitename + '_refpoints2' + '.pkl'), 'rb') as f:
    refpoints = pickle.load(f)

# path to images
file_path_pan = os.path.join(os.getcwd(), 'data', satname, sitename, 'pan')
file_path_ms = os.path.join(os.getcwd(), 'data', satname, sitename, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)
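
# Note: os.listdir() makes no ordering guarantee, and the loop below assumes that
# the pan and ms listings line up index by index. If that assumption ever breaks,
# sorting both lists is a minimal safeguard (hypothetical, relying on the shared
# filename convention):
# file_names_pan = sorted(file_names_pan)
# file_names_ms = sorted(file_names_ms)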

# initialise some variables
cloud_cover_ts = []
date_acquired_ts = []
idx_skipped = []
idx_nocloud = []

t = []
shorelines = []

with open(os.path.join(filepath, sitename + '_idxnocloud' + '.pkl'), 'rb') as f:
    idx_nocloud = pickle.load(f)
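
# the loop below writes one png per image into a 'plots' subfolder; assuming that
# is the intended output location, create it if it does not exist yet
os.makedirs(os.path.join(filepath, 'plots'), exist_ok=True)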

for i in idx_nocloud:

    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]

    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(k + 1).ReadAsArray() for k in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)

    # cloud mask from the QA band, resized to the pan resolution with nearest-neighbour interpolation (order 0)
    im_qa = im_ms[:,:,5]
    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
    cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                  order=0, preserve_range=True,
                                  mode='constant').astype('bool_')
    # resize the ms bands to the pan resolution using bilinear interpolation (order 1)
    im_ms = transform.resize(im_ms, (im_pan.shape[0], im_pan.shape[1]),
                             order=1, preserve_range=True, mode='constant')

    # check for -inf or nan values and add them to the cloud mask
    im_inf = np.isin(im_ms[:,:,0], -np.inf)
    im_nan = np.isnan(im_ms[:,:,0])
    cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)

    # skip the image if the cloud cover fraction exceeds the threshold
    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
    if cloud_cover > cloud_thresh:
        print('skipped cloud ' + str(i))
        idx_skipped.append(i)
        continue
    # idx_nocloud.append(i)
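
    # Note: nothing in this script appends to date_acquired_ts or cloud_cover_ts,
    # so the duplicate-date check below never triggers here; it is presumably
    # carried over from the full shoreline-extraction script.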
    # check whether an image for that date is already present and keep the one with less cloud
    # (the 10-character acquisition date is sliced out of the filename, after the satname and sitename prefixes)
    if file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] in date_acquired_ts:
        idx_samedate = utils.find_indices(date_acquired_ts,
                                          lambda e: e == file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10])
        idx_samedate = idx_samedate[0]
        print(str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
        if cloud_cover >= cloud_cover_ts[idx_samedate]:
            print('skipped double ' + str(i))
            idx_skipped.append(i)
            continue
        else:
            del shorelines[idx_samedate]
            del t[idx_samedate]
            del cloud_cover_ts[idx_samedate]
            del date_acquired_ts[idx_samedate]
            print('deleted ' + str(idx_samedate))

    # rescale intensities
    im_ms = sds.rescale_image_intensity(im_ms, cloud_mask, prob_high, plot_bool)
    im_pan = sds.rescale_image_intensity(im_pan, cloud_mask, prob_high, plot_bool)
    # pansharpen the rgb bands with the pan band
    im_ms_ps = sds.pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask, plot_bool)
    # add the down-sized NIR and SWIR bands (pansharpening is not possible for them)
    im_ms_ps = np.append(im_ms_ps, im_ms[:,:,[3,4]], axis=2)
    # calculate NDWI from the NIR and green bands
    im_ndwi = sds.nd_index(im_ms_ps[:,:,3], im_ms_ps[:,:,1], cloud_mask, plot_bool)
    # detect the water line as contours of the NDWI image
    wl_pix = sds.find_wl_contours(im_ndwi, cloud_mask, min_contour_points, plot_bool)
    # convert from pixel to world coordinates
    wl_coords = sds.convert_pix2world(wl_pix, georef)
    # convert to the output epsg spatial reference
    wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)
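
    # Note: in this gif-making script the mapped shoreline (wl) is not stored or
    # drawn on the frame; only the rgb image itself is saved below.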

    # save the image as a png frame for the gif
    fig = plt.figure()
    plt.grid(False)
    plt.imshow(im_ms_ps[:,:,[2,1,0]], animated=True)
    mng = plt.get_current_fig_manager()
    if hasattr(mng, 'window'):  # maximising is only possible with an interactive backend (not with Agg)
        mng.window.showMaximized()
    # use the acquisition date sliced from the filename as title and as png name
    plt.title(file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10])
    plt.xticks([])
    plt.yticks([])
    plt.axis('equal')
    plt.tight_layout()
    plt.draw()
    plt.savefig(os.path.join(filepath, 'plots',
                             file_names_pan[i][len(satname)+1+len(sitename)+1:len(satname)+1+len(sitename)+1+10] + '.png'),
                dpi=300)
    plt.close()

# create gif
images = []
filenames = os.listdir(os.path.join(filepath, 'plots'))
with imageio.get_writer('movie.gif', mode='I', duration=0.2) as writer:
    for filename in filenames:
        image = imageio.imread(os.path.join(filepath, 'plots', filename))
        writer.append_data(image)
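
# Note: the gif is written as 'movie.gif' in the current working directory and the
# frames are appended in directory-listing order, which is not guaranteed to be
# chronological. Since the pngs are named by acquisition date, a minimal variant
# (with an assumed output path) would be:
# filenames = sorted(os.listdir(os.path.join(filepath, 'plots')))
# with imageio.get_writer(os.path.join(filepath, sitename + '.gif'), mode='I', duration=0.2) as writer:
#     for filename in filenames:
#         writer.append_data(imageio.imread(os.path.join(filepath, 'plots', filename)))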