work on shoreline comparison

development
kvos 7 years ago
parent 39c5bb05e1
commit 7ca64f2fa0

Binary file not shown.

@@ -55,3 +55,7 @@ def compare_images(im1, im2):
 def find_indices(lst, condition):
     "imitation of MATLAB find function"
     return [i for i, elem in enumerate(lst) if condition(elem)]
+
+def reject_outliers(data, m=2):
+    "rejects outliers in a numpy array"
+    return data[abs(data - np.mean(data)) < m * np.std(data)]
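A quick sanity check of the new reject_outliers helper (standalone sketch; the sample values are made up, and numpy is imported here since utils already imports it as np):

    import numpy as np

    def reject_outliers(data, m=2):
        "rejects outliers in a numpy array"
        return data[abs(data - np.mean(data)) < m * np.std(data)]

    data = np.array([1.0, 1.0, 1.0, 1.0, 10.0])
    print(reject_outliers(data))   # -> [1. 1. 1. 1.]; 10.0 sits 2 std or more from the mean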

@@ -30,18 +30,20 @@ import skimage.measure as measure
 import functions.utils as utils
 import functions.sds as sds
 
+# some settings
 np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
 plt.rcParams['axes.grid'] = True
 plt.rcParams['figure.max_open_warning'] = 100
 ee.Initialize()
-# initial settings
+# parameters
 cloud_thresh = 0.5 # threshold for cloud cover
 plot_bool = False # if you want the plots
 prob_high = 99.9 # upper probability to clip and rescale pixel intensity
 min_contour_points = 100 # minimum number of points contained in each water line
 output_epsg = 28356 # GDA94 / MGA Zone 56
 
+# load metadata (timestamps and epsg code) for the collection
 satname = 'L8'
 sitename = 'NARRA'
 filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
@@ -51,13 +53,18 @@ timestamps_sorted = sorted(timestamps)
 with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
     input_epsg = pickle.load(f)
 
+# path to images
 file_path_pan = os.path.join(os.getcwd(), 'data', 'L8', 'NARRA', 'pan')
 file_path_ms = os.path.join(os.getcwd(), 'data', 'L8', 'NARRA', 'ms')
 file_names_pan = os.listdir(file_path_pan)
 file_names_ms = os.listdir(file_path_ms)
 N = len(file_names_pan)
 
-idx_high_cloud = []
+# initialise some variables
+cloud_cover_ts = []
+date_acquired_ts = []
+idx_skipped = []
+
 t = []
 shorelines = []
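The metadata read above is plain pickled Python objects; for reference, the round-trip pattern looks like this (hypothetical file name and value):

    import pickle

    with open('NARRA_epsgcode.pkl', 'wb') as f:
        pickle.dump(32656, f)          # hypothetical EPSG code
    with open('NARRA_epsgcode.pkl', 'rb') as f:
        print(pickle.load(f))          # -> 32656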
@@ -86,11 +93,27 @@ for i in range(N):
     im_inf = np.isin(im_ms[:,:,0], -np.inf)
     im_nan = np.isnan(im_ms[:,:,0])
     cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
-    cloud_content = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
-    if cloud_content > cloud_thresh:
-        print('skipped ' + str(i))
-        idx_high_cloud.append(i)
+    cloud_cover = sum(sum(cloud_mask.astype(int)))/(cloud_mask.shape[0]*cloud_mask.shape[1])
+    if cloud_cover > cloud_thresh:
+        print('skipped cloud ' + str(i))
+        idx_skipped.append(i)
         continue
+
+    # check if an image for that date is already present and keep the one with the lowest cloud cover
+    if file_names_pan[i][9:19] in date_acquired_ts:
+        idx_samedate = utils.find_indices(date_acquired_ts, lambda e: e == file_names_pan[i][9:19])
+        idx_samedate = idx_samedate[0]
+        print(str(cloud_cover) + ' - ' + str(cloud_cover_ts[idx_samedate]))
+        if cloud_cover >= cloud_cover_ts[idx_samedate]:
+            print('skipped double ' + str(i))
+            idx_skipped.append(i)
+            continue
+        else:
+            del shorelines[idx_samedate]
+            del t[idx_samedate]
+            del cloud_cover_ts[idx_samedate]
+            del date_acquired_ts[idx_samedate]
+            print('deleted ' + str(idx_samedate))
     # rescale intensities
     im_ms = sds.rescale_image_intensity(im_ms, cloud_mask, prob_high, plot_bool)
     im_pan = sds.rescale_image_intensity(im_pan, cloud_mask, prob_high, plot_bool)
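The new same-date check keeps only the clearest scene per acquisition date (file_names_pan[i][9:19] appears to slice the date field out of the Landsat file name). The keep-the-lowest-cloud-cover rule in isolation, with made-up dates and cover fractions:

    # index of the clearest scene for each date
    scenes = [('2016-01-05', 0.10), ('2016-01-05', 0.04), ('2016-02-06', 0.60)]
    best = {}
    for i, (date, cover) in enumerate(scenes):
        if date not in best or cover < scenes[best[date]][1]:
            best[date] = i
    print(sorted(best.values()))   # -> [1, 2]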
@@ -107,9 +130,9 @@ for i in range(N):
     # convert to output epsg spatial reference
     wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)
-
+    # plot a figure to select the correct water line and discard cloudy images
     plt.figure()
     cmap = cm.get_cmap('jet')
     plt.subplot(121)
     plt.imshow(im_ms_ps[:,:,[2,1,0]])
     for j,contour in enumerate(wl_pix):
@@ -117,7 +140,6 @@ for i in range(N):
         plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color=colours[j,:])
     plt.axis('image')
     plt.title(file_names_pan[i])
-
     plt.subplot(122)
     centroids = []
     for j,contour in enumerate(wl):
@@ -131,17 +153,18 @@ for i in range(N):
     mng.window.showMaximized()
     plt.tight_layout()
     plt.draw()
+    # click on the left image to discard, otherwise on the closest centroid in the right image
     pt_in = np.array(ginput(1))
     if pt_in[0][0] < 10000:
-        print('skipped m ' + str(i))
-        idx_high_cloud.append(i)
+        print('skipped manual ' + str(i))
+        idx_skipped.append(i)
         continue
     dist_centroid = [np.linalg.norm(_ - pt_in) for _ in centroids]
     shorelines.append(wl[np.argmin(dist_centroid)])
     t.append(timestamps_sorted[i])
+    cloud_cover_ts.append(cloud_cover)
+    date_acquired_ts.append(file_names_pan[i][9:19])
 
     #plt.figure()
     #plt.axis('equal')
@@ -151,7 +174,7 @@ for i in range(N):
 output = {'t':t, 'shorelines':shorelines}
-with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'wb') as f:
+with open(os.path.join(filepath, sitename + '_output2' + '.pkl'), 'wb') as f:
     pickle.dump(output, f)
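The selection step relies on ginput: a click on the left panel lands in pixel coordinates (hence the < 10000 test, since projected eastings are much larger) and discards the image, while a click on the right panel keeps the contour with the nearest centroid. The nearest-centroid rule on its own, with made-up coordinates:

    import numpy as np

    centroids = [np.array([342000.0, 6266500.0]), np.array([342800.0, 6266900.0])]
    pt_in = np.array([[342750.0, 6266880.0]])   # stand-in for ginput(1)
    dist_centroid = [np.linalg.norm(_ - pt_in) for _ in centroids]
    print(np.argmin(dist_centroid))   # -> 1, the second contour is kept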

@@ -19,6 +19,7 @@ import pytz
 import scipy.io as sio
 import scipy.interpolate as interpolate
 import statsmodels.api as sm
+import skimage.measure as measure
 
 # my functions
 import functions.utils as utils
@@ -41,7 +42,7 @@ dates_quad = [datetime(dates_quad[i,0], dates_quad[i,1], dates_quad[i,2],
 satname = 'L8'
 sitename = 'NARRA'
 filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
-with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'rb') as f:
+with open(os.path.join(filepath, sitename + '_output2' + '.pkl'), 'rb') as f:
     output = pickle.load(f)
 dates_l8 = output['t']
 # convert to AEST
@@ -58,6 +59,18 @@ dates_wave = [datetime(wave_data['dates'][i,0], wave_data['dates'][i,1],
                        wave_data['dates'][i,2], wave_data['dates'][i,3],
                        wave_data['dates'][i,4], wave_data['dates'][i,5],
                        tzinfo=au_tz) for i in idx]
+
+# load tide data
+filename = 'SydTideData.mat'
+filepath = os.path.join(os.getcwd(), 'data', 'tide', filename)
+tide_data = sio.loadmat(filepath)
+idx = utils.find_indices(tide_data['dates'][:,0], lambda e: e >= dates_l8[0].year and e <= dates_l8[-1].year)
+tide = np.array([tide_data['tide'][i][0] for i in idx])
+dates_tide = [datetime(tide_data['dates'][i,0], tide_data['dates'][i,1],
+                       tide_data['dates'][i,2], tide_data['dates'][i,3],
+                       tide_data['dates'][i,4], tide_data['dates'][i,5],
+                       tzinfo=au_tz) for i in idx]
+
 #%% make a plot of all the dates
 orange = [255/255,140/255,0]
 blue = [0,191/255,255/255]
@@ -99,7 +112,7 @@ f.subplots_adjust(hspace=0.2)
 plt.draw()
 #%% calculate days difference
 diff_days = [ [(x - _).days for _ in dates_quad] for x in dates_l8]
-max_diff = 5
+max_diff = 10
 idx_closest = [utils.find_indices(_, lambda e: abs(e) <= max_diff) for _ in diff_days]
 dates_diff = []
 for i in range(len(idx_closest)):
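The tide series loaded above is matched to each Landsat acquisition further down by taking the sample with the smallest time difference. That matching step in isolation, with hypothetical datetimes:

    import numpy as np
    from datetime import datetime

    dates_tide = [datetime(2016, 1, 5, h) for h in range(24)]   # hourly tide samples
    date_sat = datetime(2016, 1, 5, 10, 40)                     # image acquisition time
    idx = np.argmin(np.abs(np.array([(date_sat - _).total_seconds() for _ in dates_tide])))
    print(dates_tide[idx])   # -> 2016-01-05 11:00:00, the closest sample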
@@ -118,12 +131,45 @@ for i in range(len(idx_closest)):
         "days diff": diff_days[i][idx_closest[i][0]]
     })
-np.mean([ np.abs(_['days diff']) for _ in dates_diff])
+# make a plot
+plt.figure()
+counter = 0
+for i in range(len(dates_diff)):
+    counter = counter + 1
+    if dates_diff[i]['date quad'] > dates_diff[i]['date sat']:
+        date_min = dates_diff[i]['date sat']
+        date_max = dates_diff[i]['date quad']
+        color1 = orange
+        color2 = blue
+    else:
+        date_min = dates_diff[i]['date quad']
+        date_max = dates_diff[i]['date sat']
+        color1 = blue
+        color2 = orange
+    idx_t = utils.find_indices(dates_wave, lambda e: e >= date_min and e <= date_max)
+    hsigmax = np.nanmax([hsig[j] for j in idx_t])
+    hsigmin = np.nanmin([hsig[j] for j in idx_t])
+    if counter > 9:
+        counter = 1
+        plt.figure()
+    ax = plt.subplot(3,3,counter)
+    plt.plot([dates_wave[j] for j in idx_t], [hsig[j] for j in idx_t], 'k-', linewidth=1.5)
+    plt.plot([date_min, date_min], [0, 4.5], color=color2, label='survey')
+    plt.plot([date_max, date_max], [0, 4.5], color=color1, label='landsat8')
+    plt.ylabel('Hs [m]')
+    ax.xaxis.set_major_locator(mdates.DayLocator(tz=au_tz))
+    ax.xaxis.set_minor_locator(mdates.HourLocator(tz=au_tz))
+    ax.xaxis.set_major_formatter(mdates.DateFormatter('%d'))
+    plt.title(dates_diff[i]['date sat'].strftime('%b %Y') + ' (' + str(abs(dates_diff[i]['days diff'])) + ' days)')
+plt.draw()
+plt.gcf().subplots_adjust(hspace=0.5)
+
+np.mean([ np.abs(_['days diff']) for _ in dates_diff])
 #%% compare shorelines
 dist_thresh = 200 # maximum distance between an sds point and a narrabeen point
-frac_smooth = 1./12 # fraction of the data used for smoothing (the bigger the smoother)
+frac_smooth = 1./10 # fraction of the data used for smoothing (the bigger the smoother)
 dist_buffer = 50 # buffer of points selected for interpolation
 
 # load quadbike .mat files
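frac in sm.nonparametric.lowess is the fraction of the data used for each local fit, so the change from 1./12 to 1./10 smooths slightly more. A toy run on a synthetic noisy shoreline (made-up coordinates):

    import numpy as np
    import statsmodels.api as sm

    y = np.linspace(0, 1000, 200)                   # alongshore coordinate
    x = 0.5*y + np.random.normal(0, 5, y.size)      # noisy cross-shore position
    sl_smooth = sm.nonparametric.lowess(x, y, frac=1./10, it=6)
    print(sl_smooth.shape)   # -> (200, 2), columns are [y, smoothed x]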
@@ -140,6 +186,7 @@ with open(os.path.join(os.getcwd(), 'olddata', 'narra_beach' + '.pkl'), 'rb') as f:
 dates_quad = [datetime(int(_[6:10]), int(_[11:13]), int(_[14:16]), tzinfo=au_tz) for _ in filenames]
 zav = []
+ztide = []
 for i in range(len(dates_diff)):
     # select closest 3D survey
     idx_closest = np.argmin(np.abs(np.array([(dates_diff[i]['date sat'] - _).days for _ in dates_quad])))
@@ -155,26 +202,39 @@ for i in range(len(dates_diff)):
     idx_beach = [np.min(np.linalg.norm(sl[i][k,:] - narrabeach, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
     sl_smooth = sm.nonparametric.lowess(sl[i][idx_beach,0], sl[i][idx_beach,1], frac=frac_smooth, it=6)
     sl_smooth = sl_smooth[:,[1,0]]
-    # make plot
-    plt.figure()
-    plt.axis('equal')
-    plt.scatter(xs, ys, s=10, c=zs, marker='o', cmap=cm.get_cmap('jet'),
-                label='quad data')
-    plt.plot(sl[i][idx_beach,0], sl[i][idx_beach,1], 'ko-', markersize=3)
-    plt.plot(sl_smooth[:,0], sl_smooth[:,1], 'ro-', markersize=3)
-    plt.xlabel('Eastings [m]')
-    plt.ylabel('Northings [m]')
-    plt.title('Local weighted scatterplot smoothing (LOWESS)')
-    plt.draw()
+    # find water level at the time the image was acquired
+    idx_closest = np.argmin(np.abs(np.array([(dates_diff[i]['date sat'] - _).total_seconds() for _ in dates_tide])))
+    tide_level = tide[idx_closest]
+    ztide.append(tide_level)
+    # find contour corresponding to the water level
+    if tide_level < np.nanmin(survey3d['z']):
+        tide_level = np.nanmin(survey3d['z'])
+        sl_tide = measure.find_contours(survey3d['z'], tide_level)
+        sl_tide = sl_tide[np.argmax(np.array([len(_) for _ in sl_tide]))]
+        count = 0
+        while len(sl_tide) < 900:
+            count = count + 1
+            tide_level = tide_level + 0.05*count
+            sl_tide = measure.find_contours(survey3d['z'], tide_level)
+            sl_tide = sl_tide[np.argmax(np.array([len(_) for _ in sl_tide]))]
+            print(str(0.05*count) + ' - ' + str(len(sl_tide)))
+    else:
+        sl_tide = measure.find_contours(survey3d['z'], tide_level)
+        sl_tide = sl_tide[np.argmax(np.array([len(_) for _ in sl_tide]))]
+    if np.any(np.isnan(sl_tide)):
+        index_nan = np.where(np.isnan(sl_tide))[0]
+        sl_tide = np.delete(sl_tide, index_nan, axis=0)
+    xtide = [survey3d['x'][int(np.round(sl_tide[m,0])), int(np.round(sl_tide[m,1]))] for m in range(sl_tide.shape[0])]
+    ytide = [survey3d['y'][int(np.round(sl_tide[m,0])), int(np.round(sl_tide[m,1]))] for m in range(sl_tide.shape[0])]
+    # interpolate SDS on 3D surface to get elevation
     zq = np.zeros((sl_smooth.shape[0], 1))
     for j in range(sl_smooth.shape[0]):
         xq = sl_smooth[j,0]
         yq = sl_smooth[j,1]
         dist_q = np.linalg.norm(np.transpose(np.array([[xq - _ for _ in xs],[yq - _ for _ in ys]])), axis=1)
         idx_buffer = dist_q <= dist_buffer
+        tck = interpolate.bisplrep(xs[idx_buffer], ys[idx_buffer], zs[idx_buffer])
+        zq[j] = interpolate.bisplev(xq, yq, tck)
         # plt.figure()
         # plt.axis('equal')
         # plt.scatter(xs, ys, s=10, c=zs, marker='o', cmap=cm.get_cmap('jet'),
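Each smoothed SDS point gets its elevation from a bivariate spline fitted through the survey points within dist_buffer of the query point (the bisplrep/bisplev pair moved above the commented-out plot). The fit-and-evaluate pattern on a synthetic planar surface:

    import numpy as np
    import scipy.interpolate as interpolate

    rng = np.random.default_rng(0)
    xs = rng.uniform(0, 50, 200)
    ys = rng.uniform(0, 50, 200)
    zs = 0.02*xs + 0.05*ys                        # synthetic 'survey' elevations
    tck = interpolate.bisplrep(xs, ys, zs)        # fit a smooth bivariate spline
    print(interpolate.bisplev(25.0, 25.0, tck))   # ~ 1.75 on this plane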
@@ -182,40 +242,64 @@ for i in range(len(dates_diff)):
         # plt.plot(xs[idx_buffer], ys[idx_buffer], 'ko')
         # plt.plot(xq,yq,'ro')
         # plt.draw()
-        tck = interpolate.bisplrep(xs[idx_buffer], ys[idx_buffer], zs[idx_buffer])
-        zq[j] = interpolate.bisplev(xq, yq, tck)
-    zav.append(np.median(zq))
+    zav.append(np.median(utils.reject_outliers(zq, m=2)))
+    # make plot
+    red = [255/255, 0, 0]
+    gray = [0.75, 0.75, 0.75]
     plt.figure()
+    plt.subplot(121)
+    plt.axis('equal')
+    plt.scatter(xs, ys, s=10, c=zs, marker='o', cmap=cm.get_cmap('jet'),
+                label='3D survey')
+    plt.plot(xtide, ytide, '--', color=gray, linewidth=2.5, label='tide level contour')
+    plt.plot(sl_smooth[:,0], sl_smooth[:,1], '-', color=red, linewidth=2.5, label='SDS')
+    # plt.plot(sl[i][idx_beach,0], sl[i][idx_beach,1], 'go', markersize=3)
+    plt.xlabel('Eastings [m]')
+    plt.ylabel('Northings [m]')
+    plt.title('Shoreline comparison')
+    plt.colorbar(label='mAHD')
+    plt.legend()
+    plt.ylim((6266100, 6267000))
+    plt.subplot(122)
     plt.plot(sl_smooth[:,1], zq, 'ko-', markersize=5)
-    plt.plot([sl_smooth[0,1], sl_smooth[-1,1]], [zav[i], zav[i]], 'r--')
+    plt.plot([sl_smooth[0,1], sl_smooth[-1,1]], [zav[i], zav[i]], 'r--', label='median')
+    plt.plot([sl_smooth[0,1], sl_smooth[-1,1]], [ztide[i], ztide[i]], 'g--', label='measured tide')
     plt.xlabel('Northings [m]')
     plt.ylabel('Elevation [mAHD]')
-    plt.title('Interpolated SDS elevation')
+    plt.title('Alongshore SDS elevation')
+    plt.legend()
+    mng = plt.get_current_fig_manager()
+    mng.window.showMaximized()
+    plt.tight_layout()
     plt.draw()
+    print(i)
+
+# Calculate some error statistics
+zav = np.array(zav)
+ztide = np.array(ztide)
 #%%
-i = 0
-lowess = sm.nonparametric.lowess
-x = sl[i][idx_beach,0]
-y = sl[i][idx_beach,1]
-sl_smooth = lowess(x, y, frac=1./15, it=6)
 plt.figure()
-plt.axis('equal')
-plt.scatter
-plt.plot(x, y, 'bo-', linewidth=2, marker='o',
-         color='b', label='original')
-plt.plot(sl_smooth[:,1], sl_smooth[:,0], linewidth=2, marker='o',
-         color='r', label='smooth')
-plt.legend()
-plt.xlabel('Eastings [m]')
-plt.ylabel('Northings [m]')
-plt.title('Local weighted scatterplot smoothing (LOWESS)')
+plt.plot(zav - ztide)
 plt.draw()
+zav - ztide
+
+#%% plot to show LOWESS smoothing
+#i = 0
+#idx_beach = [np.min(np.linalg.norm(sl[i][k,:] - narrabeach, axis=1)) < dist_thresh for k in range(sl[i].shape[0])]
+#x = sl[i][idx_beach,0]
+#y = sl[i][idx_beach,1]
+#sl_smooth = lowess(x, y, frac=1./10, it=10)
+#
+#plt.figure()
+#plt.axis('equal')
+#plt.plot(x, y, 'bo', linewidth=2, label='original SDS')
+#plt.plot(sl_smooth[:,1], sl_smooth[:,0], 'ro', linewidth=2, label='smoothed SDS')
+#plt.legend()
+#plt.xlabel('Eastings [m]')
+#plt.ylabel('Northings [m]')
+#plt.title('Local weighted scatterplot smoothing (LOWESS)')
+#plt.draw()
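zav - ztide is the per-image difference between the median SDS elevation and the measured tide level, i.e. the vertical bias of the satellite-derived shoreline. Summary statistics would follow the usual pattern (sketch with made-up values):

    import numpy as np

    zav = np.array([0.82, 0.95, 0.71])     # hypothetical median SDS elevations [mAHD]
    ztide = np.array([0.60, 0.75, 0.65])   # hypothetical tide levels [mAHD]
    err = zav - ztide
    print(np.mean(err), np.sqrt(np.mean(err**2)))   # mean bias and RMSE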
