reorganisation

development
kvos 7 years ago
parent 783cd5d033
commit 1093ecfeba

.gitignore vendored

@@ -1,2 +1,3 @@
 *.pyc
 *.mat
+*.tif

Binary file not shown.

Binary file not shown.

@@ -32,7 +32,7 @@ import skimage.measure as measure
 # import own modules
-from functions.utils import *
+import functions.utils as utils

 np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
 ee.Initialize()
@@ -60,10 +60,10 @@ rect_narra = [[[151.301454, -33.700754],
                [151.301454, -33.700754]]];
 # dates
-start_date = '2016-01-01'
-end_date = '2016-12-31'
+#start_date = '2016-01-01'
+#end_date = '2016-12-31'
 # filter by location
-flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra)).filterDate(start_date, end_date)
+flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))#.filterDate(start_date, end_date)
 n_img = flt_col.size().getInfo()
 print('Number of images covering Narrabeen:', n_img)
@@ -105,10 +105,10 @@ for i in range(n_img):
         filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + '_r' + suffix
     all_names_pan.append(filename_pan)
-#    local_data_pan = download_tif(im, rect_narra, pan_band, filepath_pan)
-#    os.rename(local_data_pan, os.path.join(filepath_pan, filename_pan))
-#    local_data_ms = download_tif(im, rect_narra, ms_bands, filepath_ms)
-#    os.rename(local_data_ms, os.path.join(filepath_ms, filename_ms))
+    local_data_pan = download_tif(im, rect_narra, pan_band, filepath_pan)
+    os.rename(local_data_pan, os.path.join(filepath_pan, filename_pan))
+    local_data_ms = download_tif(im, rect_narra, ms_bands, filepath_ms)
+    os.rename(local_data_ms, os.path.join(filepath_ms, filename_ms))
 with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'wb') as f:

@@ -0,0 +1,118 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 27 17:12:35 2018
@author: Kilian
"""
# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee
# other modules
from osgeo import gdal, ogr, osr
from urllib.request import urlretrieve
import zipfile
from datetime import datetime
import pytz
import pickle
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
# import own modules
import functions.utils as utils
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
ee.Initialize()
def download_tif(image, polygon, bandsId, filepath):
    """downloads tif image (region and bands) from the ee server and stores it in a temp file"""
    url = ee.data.makeDownloadUrl(ee.data.getDownloadId({
        'image': image.serialize(),
        'region': polygon,
        'bands': bandsId,
        'filePerBand': 'false',
        'name': 'data',
        }))
    local_zip, headers = urlretrieve(url)
    with zipfile.ZipFile(local_zip) as local_zipfile:
        return local_zipfile.extract('data.tif', filepath)
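# minimal usage sketch (hypothetical image id and band dict, assuming ee.Initialize() has run):
#   tif = download_tif(ee.Image(some_id), rect_narra, [some_band_dict], os.getcwd())
# because filePerBand is 'false', the server returns a single zip holding one 'data.tif'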
# select collection
input_col = ee.ImageCollection('LANDSAT/LE07/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.301454, -33.700754],
[151.311453, -33.702075],
[151.307237, -33.739761],
[151.294220, -33.736329],
[151.301454, -33.700754]]];
# dates
#start_date = '2016-01-01'
#end_date = '2016-12-31'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))#.filterDate(start_date, end_date)
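# filterBounds keeps every image whose footprint intersects the polygon;
# the date filter is commented out above so the full archive is retrieved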
n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')
satname = 'L7'
sitename = 'NARRA'
suffix = '.tif'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
filepath_pan = os.path.join(filepath, 'pan')
filepath_ms = os.path.join(filepath, 'ms')
all_names_pan = []
all_names_ms = []
timestamps = []
# loop through all images
for i in range(n_img):
    # find each image in ee database
    im = ee.Image(im_all[i].get('id'))
    im_dic = im.getInfo()
    im_bands = im_dic.get('bands')
    im_date = im_dic['properties']['DATE_ACQUIRED']
    t = im_dic['properties']['system:time_start']  # milliseconds since epoch, UTC
    im_timestamp = datetime.fromtimestamp(t/1000, tz=pytz.utc)
    timestamps.append(im_timestamp)
    im_epsg = int(im_dic['bands'][0]['crs'][5:])  # e.g. 'EPSG:32656' -> 32656
    # delete dimensions key from dictionary, otherwise the entire image is extracted
    for j in range(len(im_bands)): del im_bands[j]['dimensions']
    pan_band = [im_bands[7]]
    ms_bands = [im_bands[0], im_bands[1], im_bands[2], im_bands[3], im_bands[4], im_bands[9]]
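    # note: these indices assume this collection's band ordering, in which
    # im_bands[7] is the panchromatic band and im_bands[9] the BQA quality band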
    filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + suffix
    filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + suffix
    print(i)
    if any(filename_pan in _ for _ in all_names_pan):
        filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + '_r' + suffix
        filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + '_r' + suffix
    all_names_pan.append(filename_pan)
    local_data_pan = download_tif(im, rect_narra, pan_band, filepath_pan)
    os.rename(local_data_pan, os.path.join(filepath_pan, filename_pan))
    local_data_ms = download_tif(im, rect_narra, ms_bands, filepath_ms)
    os.rename(local_data_ms, os.path.join(filepath_ms, filename_ms))

with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'wb') as f:
    pickle.dump(timestamps, f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'wb') as f:
    pickle.dump(im_epsg, f)

@@ -25,6 +25,7 @@ import skimage.exposure as exposure
 import skimage.transform as transform
 import sklearn.decomposition as decomposition
 import skimage.measure as measure
+import skimage.morphology as morphology
 # import own modules
@@ -79,7 +80,7 @@ def load_image(image, polygon, bandsId):
     bands = [dataset.GetRasterBand(i + 1).ReadAsArray() for i in range(dataset.RasterCount)]
     return np.stack(bands, 2), georef

-def create_cloud_mask(im_qa):
+def create_cloud_mask(im_qa, satname, plot_bool):
     """
     Creates a cloud mask from the image containing the QA band information
@@ -89,6 +90,10 @@ def create_cloud_mask(im_qa):
     -----------
     im_qa: np.ndarray
         Image containing the QA band
+    satname: string
+        short name for the satellite (L8, L7, S2)
+    plot_bool: boolean
+        True if plot is wanted
     Returns:
     -----------
@@ -97,8 +102,20 @@ def create_cloud_mask(im_qa):
     """
     # convert QA bits
-    cloud_values = [2800, 2804, 2808, 2812, 6896, 6900, 6904, 6908]
+    if satname == 'L8':
+        cloud_values = [2800, 2804, 2808, 2812, 6896, 6900, 6904, 6908]
+    elif satname == 'L7':
+        cloud_values = [752, 756, 760, 764]
     cloud_mask = np.isin(im_qa, cloud_values)
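+    # note: these integer codes are assumed to be the Landsat Collection 1 BQA
+    # values flagging medium/high-confidence cloud for each satellite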
+    # remove isolated cloud pixels (there are some in the swash and they cause problems)
+    if sum(sum(cloud_mask)) > 0:
+        morphology.remove_small_objects(cloud_mask, min_size=5, connectivity=8, in_place=True)
+    if plot_bool:
+        plt.figure()
+        plt.imshow(cloud_mask, cmap='gray')
+        plt.draw()
     #cloud_shadow_values = [2976, 2980, 2984, 2988, 3008, 3012, 3016, 3020]
     #cloud_shadow_mask = np.isin(im_qa, cloud_shadow_values)

@@ -0,0 +1,136 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 20 16:15:51 2018
@author: z5030440
"""
import scipy.io as sio
import os
import ee
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
import pickle
import pdb
import pytz
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import skimage.morphology as morphology
import skimage.measure as measure
import sklearn.decomposition as decomposition
from scipy import spatial
# my functions
import functions.utils as utils
import functions.sds as sds
#plt.rcParams['axes.grid'] = True
au_tz = pytz.timezone('Australia/Sydney')
# load quadbike dates and convert from datenum to datetime
suffix = '.mat'
dir_name = os.getcwd()
file_name = 'data\quadbike_dates'
file_path = os.path.join(dir_name, file_name + suffix)
quad_dates = sio.loadmat(file_path)['dates']
dt_quad = []
for i in range(quad_dates.shape[0]):
    dt_quad.append(datetime(quad_dates[i,0], quad_dates[i,1], quad_dates[i,2], tzinfo=au_tz))
# load satellite datetimes (in UTC) and convert to AEST time
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach)
rect_narra = [[[151.3473129272461,-33.69035274454718],
[151.2820816040039,-33.68206818063878],
[151.27281188964844,-33.74775138989556],
[151.3425064086914,-33.75231878701767],
[151.3473129272461,-33.69035274454718]]];
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))
n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')
# extract datetimes from image metadata
dt_sat = [_['properties']['system:time_start'] for _ in im_all]
dt_sat = [datetime.fromtimestamp(_/1000, tz=pytz.utc) for _ in dt_sat]
dt_sat = [_.astimezone(au_tz) for _ in dt_sat]
# calculate days difference
diff_days = [ [(x - _).days for _ in dt_quad] for x in dt_sat]
day_thresh = 15
idx = [utils.find_indices(_, lambda e: abs(e) < day_thresh) for _ in diff_days]
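# (utils.find_indices is assumed to behave like
#  [i for i, e in enumerate(seq) if condition(e)])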
dt_diff = []
idx_nogt = []
for i in range(n_img):
    if not idx[i]:
        idx_nogt.append(i)
        continue
    dt_diff.append({"sat dt": dt_sat[i],
                    "quad dt": [dt_quad[_] for _ in idx[i]],
                    "days diff": [diff_days[i][_] for _ in idx[i]] })
with open('idx_nogt.pkl', 'wb') as f:
    pickle.dump(idx_nogt, f)
#%%
dates_sat = mdates.date2num(dt_sat)
dates_quad = mdates.date2num(dt_quad)
plt.figure()
plt.plot_date(dates_sat, np.zeros((n_img,1)))
plt.plot_date(dates_quad, np.ones((len(dates_quad),1)))
plt.show()
data = pd.read_pickle('data_2016.pkl')
dt_sat = [_.astimezone(au_tz) for _ in data['dt']]
[ (_ - dt_sat[0]).days for _ in dt_quad]
dn_sat = []
for i in range(len(dt_sat)): dn_sat.append(dt_sat[i].toordinal())
dn_sat = np.array(dn_sat)
dn_sur = []
for i in range(len(dt_survey)): dn_sur.append(dt_survey[i].toordinal())
dn_sur = np.array(dn_sur)
distances = np.zeros((len(dn_sat),4)).astype('int32')
indexes = np.zeros((len(dn_sat),2)).astype('int32')
for i in range(len(dn_sat)):
    distances[i,0] = np.sort(abs(dn_sat[i] - dn_sur))[0]
    distances[i,1] = np.sort(abs(dn_sat[i] - dn_sur))[1]
    distances[i,2] = dt_sat[i].year
    distances[i,3] = dt_sat[i].month
    indexes[i,0] = np.where(abs(dn_sat[i] - dn_sur) == np.sort(abs(dn_sat[i] - dn_sur))[0])[0][0]
    indexes[i,1] = np.where(abs(dn_sat[i] - dn_sur) == np.sort(abs(dn_sat[i] - dn_sur))[1])[0][0]
years = [2013, 2014, 2015, 2016]
months = mdates.MonthLocator()
days = mdates.DayLocator()
month_fmt = mdates.DateFormatter('%b')
f, ax = plt.subplots(4, 1)
for i, ca in enumerate(ax):
    ca.xaxis.set_major_locator(months)
    ca.xaxis.set_major_formatter(month_fmt)
    ca.xaxis.set_minor_locator(days)
    ca.set_ylabel(str(years[i]))
    for j in range(len(dt_sat)):
        if dt_sat[j].year == years[i]:
            ca.plot(dt_sat[j], 0, 'bo', markerfacecolor='b')
#f.subplots_adjust(hspace=0)
#plt.setp([a.get_xticklabels() for a in f.axes[:-1]], visible=False)
plt.plot(dt_survey, np.zeros([len(dt_survey),1]), 'bo')
plt.plot(dt_sat, np.ones([len(dt_sat),1]), 'ro')
plt.yticks([])
plt.show()

@@ -31,6 +31,8 @@ import functions.utils as utils
 import functions.sds as sds

 np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
+plt.rcParams['axes.grid'] = True
+plt.rcParams['figure.max_open_warning'] = 100
 ee.Initialize()

 # initial settings
@@ -73,7 +75,7 @@ for i in range(N):
     im_ms = np.stack(bands, 2)
     # cloud mask
     im_qa = im_ms[:,:,5]
-    cloud_mask = sds.create_cloud_mask(im_qa)
+    cloud_mask = sds.create_cloud_mask(im_qa, satname, plot_bool)
     cloud_mask = transform.resize(cloud_mask, (im_pan.shape[0], im_pan.shape[1]),
                                   order=0, preserve_range=True,
                                   mode='constant').astype('bool_')
@@ -105,16 +107,19 @@ for i in range(N):
     # convert to output epsg spatial reference
     wl = sds.convert_epsg(wl_coords, input_epsg, output_epsg)
-#    plt.figure()
-#    plt.imshow(im_ms_ps[:,:,[2,1,0]])
-#    for i,contour in enumerate(wl_pix): plt.plot(contour[:, 1], contour[:, 0], linewidth=2)
-#    plt.axis('image')
-#    plt.title(file_names_pan[i])
-#    plt.show()
     plt.figure()
-    centroids = []
     cmap = cm.get_cmap('jet')
+    plt.subplot(121)
+    plt.imshow(im_ms_ps[:,:,[2,1,0]])
+    for j,contour in enumerate(wl_pix):
+        colours = cmap(np.linspace(0, 1, num=len(wl)))
+        plt.plot(contour[:, 1], contour[:, 0], linewidth=2, color=colours[j,:])
+    plt.axis('image')
+    plt.title(file_names_pan[i])
+    plt.subplot(122)
+    centroids = []
     for j,contour in enumerate(wl):
         colours = cmap(np.linspace(0, 1, num=len(wl)))
         centroids.append([np.mean(contour[:, 0]),np.mean(contour[:, 1])])
@@ -122,8 +127,18 @@ for i in range(N):
         plt.plot(np.mean(contour[:, 0]), np.mean(contour[:, 1]), 'o', color=colours[j,:])
     plt.axis('equal')
     plt.title(file_names_pan[i])
+    mng = plt.get_current_fig_manager()
+    mng.window.showMaximized()
+    plt.tight_layout()
     plt.draw()
     pt_in = np.array(ginput(1))
+    # note: a click on the left (pixel-coordinate) subplot, where x < 10000,
+    # is taken to mean 'too cloudy, skip this image'
+    if pt_in[0][0] < 10000:
+        print('skipped image ' + str(i))
+        idx_high_cloud.append(i)
+        continue
     dist_centroid = [np.linalg.norm(_ - pt_in) for _ in centroids]
     shorelines.append(wl[np.argmin(dist_centroid)])
     t.append(timestamps_sorted[i])
@@ -138,3 +153,5 @@ output = {'t':t, 'shorelines':shorelines}
 with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'wb') as f:
     pickle.dump(output, f)

@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 27 17:12:35 2018
@author: Kilian
"""
# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import ee
import pdb
# other modules
from osgeo import gdal, ogr, osr
import pickle
import matplotlib.cm as cm
from pylab import ginput
# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure
# import own modules
import functions.utils as utils
import functions.sds as sds
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()
# initial settings
cloud_thresh = 0.5 # threshold for cloud cover
plot_bool = False # if you want the plots
prob_high = 99.9 # upper probability to clip and rescale pixel intensity
min_contour_points = 100  # minimum number of points contained in each water line
output_epsg = 28356 # GDA94 / MGA Zone 56
satname = 'L7'
sitename = 'NARRA'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
    input_epsg = pickle.load(f)
file_path_pan = os.path.join(filepath, 'pan')
file_path_ms = os.path.join(filepath, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
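# note: this implicitly assumes os.listdir returns the scenes sorted by filename,
# so that their order matches timestamps_sorted (filenames embed the acquisition date)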
N = len(file_names_pan)
idx_high_cloud = []
t = []
shorelines = []
for i in range(N):
    # read pan image
    fn_pan = os.path.join(file_path_pan, file_names_pan[i])
    data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
    georef = np.array(data.GetGeoTransform())
    bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
    im_pan = np.stack(bands, 2)[:,:,0]
    # read ms image
    fn_ms = os.path.join(file_path_ms, file_names_ms[i])
    data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
    bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
    im_ms = np.stack(bands, 2)

@@ -5,18 +5,18 @@ Created on Tue Mar 20 16:15:51 2018
 @author: z5030440
 """

-import scipy.io as sio
 import os
-import ee
+import numpy as np
 import matplotlib.pyplot as plt
+import pdb
+import ee
 import matplotlib.dates as mdates
-import numpy as np
-import pandas as pd
+import matplotlib.cm as cm
 from datetime import datetime, timedelta
 import pickle
-import pdb
 import pytz
+import scipy.io as sio

 # image processing modules
 import skimage.filters as filters
@@ -29,108 +29,150 @@ from scipy import spatial
 # my functions
 import functions.utils as utils
 import functions.sds as sds

-#plt.rcParams['axes.grid'] = True
+np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
+plt.rcParams['axes.grid'] = True
+plt.rcParams['figure.max_open_warning'] = 100
 au_tz = pytz.timezone('Australia/Sydney')

 # load quadbike dates and convert from datenum to datetime
-suffix = '.mat'
-dir_name = os.getcwd()
-file_name = 'data\quadbike_dates'
-file_path = os.path.join(dir_name, file_name + suffix)
-quad_dates = sio.loadmat(file_path)['dates']
-dt_quad = []
-for i in range(quad_dates.shape[0]):
-    dt_quad.append(datetime(quad_dates[i,0], quad_dates[i,1], quad_dates[i,2], tzinfo=au_tz))
-
-# load satellite datetimes (in UTC) and convert to AEST time
-input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
-# location (Narrabeen-Collaroy beach)
-rect_narra = [[[151.3473129272461,-33.69035274454718],
-               [151.2820816040039,-33.68206818063878],
-               [151.27281188964844,-33.74775138989556],
-               [151.3425064086914,-33.75231878701767],
-               [151.3473129272461,-33.69035274454718]]];
-flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))
-n_img = flt_col.size().getInfo()
-print('Number of images covering Narrabeen:', n_img)
-im_all = flt_col.getInfo().get('features')
-
-# extract datetimes from image metadata
-dt_sat = [_['properties']['system:time_start'] for _ in im_all]
-dt_sat = [datetime.fromtimestamp(_/1000, tz=pytz.utc) for _ in dt_sat]
-dt_sat = [_.astimezone(au_tz) for _ in dt_sat]
+filename = 'data\quadbike\survey_dates.mat'
+filepath = os.path.join(os.getcwd(), filename)
+dates_quad = sio.loadmat(filepath)['dates'] # matrix containing year, month, day
+dates_quad = [datetime(dates_quad[i,0], dates_quad[i,1], dates_quad[i,2],
+                       tzinfo=au_tz) for i in range(dates_quad.shape[0])]
+
+# load timestamps from satellite images
+satname = 'L8'
+sitename = 'NARRA'
+filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
+with open(os.path.join(filepath, sitename + '_output' + '.pkl'), 'rb') as f:
+    output = pickle.load(f)
+dates_l8 = output['t']
+# convert to AEST
+dates_l8 = [_.astimezone(au_tz) for _ in dates_l8]
+
+# load wave data
+filename = 'data\wave\SydneyProcessed.mat'
+filepath = os.path.join(os.getcwd(), filename)
+wave_data = sio.loadmat(filepath)
+idx = utils.find_indices(wave_data['dates'][:,0], lambda e: e >= dates_l8[0].year and e <= dates_l8[-1].year)
+hsig = np.array([wave_data['Hsig'][i][0] for i in idx])
+wdir = np.array([wave_data['Wdir'][i][0] for i in idx])
+dates_wave = [datetime(wave_data['dates'][i,0], wave_data['dates'][i,1],
+                       wave_data['dates'][i,2], wave_data['dates'][i,3],
+                       wave_data['dates'][i,4], wave_data['dates'][i,5],
+                       tzinfo=au_tz) for i in idx]
+
+#%% make a plot of all the dates
+f = plt.figure()
+months = mdates.MonthLocator()
+month_fmt = mdates.DateFormatter('%b %Y')
+days = mdates.DayLocator()
+years = [2013,2014,2015,2016]
+for k in range(len(years)):
+    sel_year = years[k]
+    ax = plt.subplot(4,1,k+1)
+    idx_year = utils.find_indices(dates_wave, lambda e: e.year == sel_year)
+    plt.plot([dates_wave[i] for i in idx_year], [hsig[i] for i in idx_year], 'k-', linewidth=0.5)
+    hsigmax = np.nanmax([hsig[i] for i in idx_year])
+    cbool = True
+    for j in range(len(dates_quad)):
+        if dates_quad[j].year == sel_year:
+            if cbool:
+                plt.plot([dates_quad[j], dates_quad[j]], [0, hsigmax], color=[255/255,140/255,0], label='survey')
+                cbool = False
+            else:
+                plt.plot([dates_quad[j], dates_quad[j]], [0, hsigmax], color=[255/255,140/255,0])
+    cbool = True
+    for j in range(len(dates_l8)):
+        if dates_l8[j].year == sel_year:
+            if cbool:
+                plt.plot([dates_l8[j], dates_l8[j]], [0, hsigmax], color=[0,191/255,255/255], label='landsat8')
+                cbool = False
+            else:
+                plt.plot([dates_l8[j], dates_l8[j]], [0, hsigmax], color=[0,191/255,255/255])
+    if k == 3:
+        plt.legend()
+    plt.xlim((datetime(sel_year,1,1, tzinfo=au_tz), datetime(sel_year,12,31, tzinfo=au_tz)))
+    plt.ylim((0, hsigmax))
+    plt.ylabel('Hs [m]')
+    ax.xaxis.set_major_locator(months)
+    ax.xaxis.set_major_formatter(month_fmt)
+f.subplots_adjust(hspace=0.2)
+plt.draw()

+#%%
 # calculate days difference
-diff_days = [ [(x - _).days for _ in dt_quad] for x in dt_sat]
-day_thresh = 15
-idx = [utils.find_indices(_, lambda e: abs(e) < day_thresh) for _ in diff_days]
-
-dt_diff = []
-idx_nogt = []
-for i in range(n_img):
-    if not idx[i]:
-        idx_nogt.append(i)
-        continue
-    dt_diff.append({"sat dt": dt_sat[i],
-                    "quad dt": [dt_quad[_] for _ in idx[i]],
-                    "days diff": [diff_days[i][_] for _ in idx[i]] })
-
-with open('idx_nogt.pkl', 'wb') as f:
-    pickle.dump(idx_nogt, f)
+diff_days = [ [(x - _).days for _ in dates_quad] for x in dates_l8]
+max_diff = 5
+idx_closest = [utils.find_indices(_, lambda e: abs(e) <= max_diff) for _ in diff_days]
+dates_diff = []
+for i in range(len(idx_closest)):
+    if not idx_closest[i]:
+        continue
+    elif len(idx_closest[i]) > 1:
+        idx_best = np.argmin(np.abs([diff_days[i][_] for _ in idx_closest[i]]))
+        dates_temp = [dates_quad[_] for _ in idx_closest[i]]
+        days_temp = [diff_days[i][_] for _ in idx_closest[i]]
+        dates_diff.append({"date sat": dates_l8[i],
+                           "date quad": dates_temp[idx_best],
+                           "days diff": days_temp[idx_best]})
+    else:
+        dates_diff.append({"date sat": dates_l8[i],
+                           "date quad": dates_quad[idx_closest[i][0]],
+                           "days diff": diff_days[i][idx_closest[i][0]]
+                           })
+np.mean([ np.abs(_['days diff']) for _ in dates_diff])

 #%%
-dates_sat = mdates.date2num(dt_sat)
-dates_quad = mdates.date2num(dt_quad)
-plt.figure()
-plt.plot_date(dates_sat, np.zeros((n_img,1)))
-plt.plot_date(dates_quad, np.ones((len(dates_quad),1)))
-plt.show()
-
-data = pd.read_pickle('data_2016.pkl')
-dt_sat = [_.astimezone(au_tz) for _ in data['dt']]
-
-[ (_ - dt_sat[0]).days for _ in dt_quad]
-
-dn_sat = []
-for i in range(len(dt_sat)): dn_sat.append(dt_sat[i].toordinal())
-dn_sat = np.array(dn_sat)
-dn_sur = []
-for i in range(len(dt_survey)): dn_sur.append(dt_survey[i].toordinal())
-dn_sur = np.array(dn_sur)
-
-distances = np.zeros((len(dn_sat),4)).astype('int32')
-indexes = np.zeros((len(dn_sat),2)).astype('int32')
-for i in range(len(dn_sat)):
-    distances[i,0] = np.sort(abs(dn_sat[i] - dn_sur))[0]
-    distances[i,1] = np.sort(abs(dn_sat[i] - dn_sur))[1]
-    distances[i,2] = dt_sat[i].year
-    distances[i,3] = dt_sat[i].month
-    indexes[i,0] = np.where(abs(dn_sat[i] - dn_sur) == np.sort(abs(dn_sat[i] - dn_sur))[0])[0][0]
-    indexes[i,1] = np.where(abs(dn_sat[i] - dn_sur) == np.sort(abs(dn_sat[i] - dn_sur))[1])[0][0]
-
-years = [2013, 2014, 2015, 2016]
-months = mdates.MonthLocator()
-days = mdates.DayLocator()
-month_fmt = mdates.DateFormatter('%b')
-f, ax = plt.subplots(4, 1)
-for i, ca in enumerate(ax):
-    ca.xaxis.set_major_locator(months)
-    ca.xaxis.set_major_formatter(month_fmt)
-    ca.xaxis.set_minor_locator(days)
-    ca.set_ylabel(str(years[i]))
-    for j in range(len(dt_sat)):
-        if dt_sat[j].year == years[i]:
-            ca.plot(dt_sat[j],0, 'bo', markerfacecolor='b')
-#f.subplots_adjust(hspace=0)
-#plt.setp([a.get_xticklabels() for a in f.axes[:-1]], visible=False)
-
-plt.plot(dt_survey, np.zeros([len(dt_survey),1]), 'bo')
-plt.plot(dt_sat, np.ones([len(dt_sat),1]), 'ro')
-plt.yticks([])
+# load quadbike .mat files
+foldername = 'data\quadbike\surveys3D'
+folderpath = os.path.join(os.getcwd(), foldername)
+filenames = os.listdir(folderpath)
+# load the satellite shorelines
+sl = output['shorelines']
+dates_quad = [datetime(int(_[6:10]), int(_[11:13]), int(_[14:16]), tzinfo=au_tz) for _ in filenames]
+# for each satellite shoreline, load the corresponding 3D survey
+for i in range(len(dates_diff)):
+    idx_closest = np.argmin(np.abs(np.array([(dates_diff[i]['date sat'] - _).days for _ in dates_quad])))
+    survey3d = sio.loadmat(os.path.join(folderpath, filenames[idx_closest]))
+    plt.figure()
+    plt.axis('equal')
+    plt.scatter(survey3d['x'], survey3d['y'], s=10, c=survey3d['z'], marker='o', cmap=cm.get_cmap('jet'),
+                label='quad data')
+    plt.plot(sl[i][:,0], sl[i][:,1], 'ko')
+    plt.draw()
+
+import statsmodels.api as sm
+lowess = sm.nonparametric.lowess
+
+# For the 1D case:
+x = sl[i][:,0]
+y = sl[i][:,1]
+x0 = x
+f_hat = lowess(y, x, return_sorted=False)  # smooth y as a function of x, evaluated at x
+fig,ax = plt.subplots(1)
+ax.scatter(x,y)
+ax.plot(x0,f_hat,'ro')
 plt.show()
+
+# 2D case (and more...) -- assumes a separate multivariate lowess module 'lo'
+# providing lowess(x, y, x0, kernel=...) and a tri_cube kernel (not statsmodels)
+x = np.random.randn(2, 100)
+f = -1 * np.sin(x[0]) + 0.5 * np.cos(x[1]) + 0.2*np.random.randn(100)
+x0 = np.mgrid[-1:1:.1, -1:1:.1]
+x0 = np.vstack([x0[0].ravel(), x0[1].ravel()])
+f_hat = lo.lowess(x, f, x0, kernel=lo.tri_cube)
+from mpl_toolkits.mplot3d import Axes3D
+fig = plt.figure()
+ax = fig.add_subplot(111, projection='3d')
+ax.scatter(x[0], x[1], f)
+ax.scatter(x0[0], x0[1], f_hat, color='r')
