forked from kilianv/CoastSat_WRL
reorganisation
parent
783cd5d033
commit
1093ecfeba
@ -1,2 +1,3 @@
|
||||
*.pyc
|
||||
*.mat
|
||||
*.tif
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,118 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Created on Tue Mar 27 17:12:35 2018
|
||||
|
||||
@author: Kilian
|
||||
"""
|
||||
|
||||
# Initial settings
|
||||
import os
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
import pdb
|
||||
import ee
|
||||
|
||||
|
||||
# other modules
|
||||
from osgeo import gdal, ogr, osr
|
||||
from urllib.request import urlretrieve
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
import pickle
|
||||
|
||||
|
||||
|
||||
# image processing modules
|
||||
import skimage.filters as filters
|
||||
import skimage.exposure as exposure
|
||||
import skimage.transform as transform
|
||||
import sklearn.decomposition as decomposition
|
||||
import skimage.measure as measure
|
||||
|
||||
|
||||
# import own modules
|
||||
import functions.utils as utils
|
||||
|
||||
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
ee.Initialize()  # connect/authenticate to the Google Earth Engine servers (requires prior `earthengine authenticate`)
|
||||
|
||||
def download_tif(image, polygon, bandsId, filepath):
    """Download a GeoTIFF (clipped to `polygon`, restricted to `bandsId`) from
    the Earth Engine server and extract it into `filepath`.

    Returns the path of the extracted 'data.tif' file.
    """
    request = {
        'image': image.serialize(),
        'region': polygon,
        'bands': bandsId,
        'filePerBand': 'false',  # single multi-band tif rather than one file per band
        'name': 'data',
    }
    download_url = ee.data.makeDownloadUrl(ee.data.getDownloadId(request))
    # urlretrieve stores the zip in a temporary location
    zip_path, _headers = urlretrieve(download_url)
    with zipfile.ZipFile(zip_path) as archive:
        return archive.extract('data.tif', filepath)
|
||||
|
||||
# ---------------------------------------------------------------------------
# Download Landsat 7 pan and multispectral GeoTIFFs covering Narrabeen beach
# from Google Earth Engine; save acquisition timestamps and EPSG code.
# ---------------------------------------------------------------------------

# select collection (Landsat 7 Collection 1 real-time TOA reflectance)
input_col = ee.ImageCollection('LANDSAT/LE07/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach): closed lon/lat polygon, first vertex repeated last
rect_narra = [[[151.301454, -33.700754],
               [151.311453, -33.702075],
               [151.307237, -33.739761],
               [151.294220, -33.736329],
               [151.301454, -33.700754]]];

# dates
#start_date = '2016-01-01'
#end_date = '2016-12-31'
# filter by location
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))#.filterDate(start_date, end_date)

n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')

# output layout: data/<satname>/<sitename>/{pan,ms}
satname = 'L7'
sitename = 'NARRA'
suffix = '.tif'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
filepath_pan = os.path.join(filepath, 'pan')
filepath_ms = os.path.join(filepath, 'ms')

all_names_pan = []
all_names_ms = []  # NOTE(review): initialized but never appended to below -- confirm intent
timestamps = []
# loop through all images
for i in range(n_img):
    # find each image in ee database
    im = ee.Image(im_all[i].get('id'))
    im_dic = im.getInfo()
    im_bands = im_dic.get('bands')
    im_date = im_dic['properties']['DATE_ACQUIRED']
    t = im_dic['properties']['system:time_start']  # milliseconds since Unix epoch
    im_timestamp = datetime.fromtimestamp(t/1000, tz=pytz.utc)
    timestamps.append(im_timestamp)
    # EPSG code parsed from a CRS string of the form 'EPSG:xxxxx'
    im_epsg = int(im_dic['bands'][0]['crs'][5:])

    # delete dimensions key from dictionary, otherwise the entire image is extracted
    for j in range(len(im_bands)): del im_bands[j]['dimensions']
    # band selection by list index -- presumably index 7 is the L7 panchromatic band; TODO confirm
    pan_band = [im_bands[7]]
    ms_bands = [im_bands[0], im_bands[1], im_bands[2], im_bands[3], im_bands[4], im_bands[9]]

    filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + suffix
    filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + suffix

    print(i)
    # same acquisition date already seen: append '_r' so the earlier file is not overwritten
    if any(filename_pan in _ for _ in all_names_pan):
        filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + '_r' + suffix
        filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + '_r' + suffix
    all_names_pan.append(filename_pan)

    # download each product to a temp file, then rename into its final location
    local_data_pan = download_tif(im, rect_narra, pan_band, filepath_pan)
    os.rename(local_data_pan, os.path.join(filepath_pan, filename_pan))
    local_data_ms = download_tif(im, rect_narra, ms_bands, filepath_ms)
    os.rename(local_data_ms, os.path.join(filepath_ms, filename_ms))


# persist acquisition timestamps and the EPSG code (of the last image processed)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'wb') as f:
    pickle.dump(timestamps, f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'wb') as f:
    pickle.dump(im_epsg, f)
|
@ -0,0 +1,136 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Created on Tue Mar 20 16:15:51 2018
|
||||
|
||||
@author: z5030440
|
||||
"""
|
||||
|
||||
import scipy.io as sio
|
||||
import os
|
||||
import ee
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.dates as mdates
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from datetime import datetime, timedelta
|
||||
import pickle
|
||||
import pdb
|
||||
import pytz
|
||||
|
||||
|
||||
# image processing modules
|
||||
import skimage.filters as filters
|
||||
import skimage.exposure as exposure
|
||||
import skimage.transform as transform
|
||||
import skimage.morphology as morphology
|
||||
import skimage.measure as measure
|
||||
import sklearn.decomposition as decomposition
|
||||
from scipy import spatial
|
||||
# my functions
|
||||
import functions.utils as utils
|
||||
import functions.sds as sds
|
||||
#plt.rcParams['axes.grid'] = True
|
||||
au_tz = pytz.timezone('Australia/Sydney')

# load quadbike dates and convert from datenum to datetime
suffix = '.mat'
dir_name = os.getcwd()
file_name = 'data\quadbike_dates'  # NOTE(review): backslash separator is Windows-only; prefer os.path.join
file_path = os.path.join(dir_name, file_name + suffix)
quad_dates = sio.loadmat(file_path)['dates']  # assumes an (n, >=3) array of [year, month, day, ...] -- TODO confirm
dt_quad = []
for i in range(quad_dates.shape[0]):
    dt_quad.append(datetime(quad_dates[i,0], quad_dates[i,1], quad_dates[i,2], tzinfo=au_tz))

# load satellite datetimes (in UTC) and convert to AEST time
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
# location (Narrabeen-Collaroy beach): closed lon/lat polygon, first vertex repeated last
rect_narra = [[[151.3473129272461,-33.69035274454718],
               [151.2820816040039,-33.68206818063878],
               [151.27281188964844,-33.74775138989556],
               [151.3425064086914,-33.75231878701767],
               [151.3473129272461,-33.69035274454718]]];
flt_col = input_col.filterBounds(ee.Geometry.Polygon(rect_narra))
n_img = flt_col.size().getInfo()
print('Number of images covering Narrabeen:', n_img)
im_all = flt_col.getInfo().get('features')

# extract datetimes from image metadata (system:time_start is ms since Unix epoch)
dt_sat = [_['properties']['system:time_start'] for _ in im_all]
dt_sat = [datetime.fromtimestamp(_/1000, tz=pytz.utc) for _ in dt_sat]
dt_sat = [_.astimezone(au_tz) for _ in dt_sat]
# calculate days difference between every satellite image and every survey date
diff_days = [ [(x - _).days for _ in dt_quad] for x in dt_sat]
day_thresh = 15
# per image: indices of surveys within +/- day_thresh days
idx = [utils.find_indices(_, lambda e: abs(e) < day_thresh) for _ in diff_days]

dt_diff = []
idx_nogt = []  # images with no ground-truth survey close enough in time
for i in range(n_img):
    if not idx[i]:
        idx_nogt.append(i)
        continue
    dt_diff.append({"sat dt": dt_sat[i],
                    "quad dt": [dt_quad[_] for _ in idx[i]],
                    "days diff": [diff_days[i][_] for _ in idx[i]] })

# save indices of images without ground truth for later filtering
with open('idx_nogt.pkl', 'wb') as f:
    pickle.dump(idx_nogt, f)
|
||||
|
||||
|
||||
#%% exploratory plotting / scratch cell
dates_sat = mdates.date2num(dt_sat)
dates_quad = mdates.date2num(dt_quad)
plt.figure()
plt.plot_date(dates_sat, np.zeros((n_img,1)))
plt.plot_date(dates_quad, np.ones((len(dates_quad),1)))
plt.show()

data = pd.read_pickle('data_2016.pkl')  # presumably a pickle with a 'dt' column of datetimes -- TODO confirm

dt_sat = [_.astimezone(au_tz) for _ in data['dt']]

# NOTE(review): expression result is discarded -- looks like a leftover debug line
[ (_ - dt_sat[0]).days for _ in dt_quad]



# convert datetimes to ordinal day numbers for nearest-survey matching
dn_sat = []
for i in range(len(dt_sat)): dn_sat.append(dt_sat[i].toordinal())
dn_sat = np.array(dn_sat)
dn_sur = []
# NOTE(review): dt_survey is not defined anywhere in this file (NameError at runtime);
# possibly dt_quad was intended -- confirm before running this cell
for i in range(len(dt_survey)): dn_sur.append(dt_survey[i].toordinal())
dn_sur = np.array(dn_sur)

# distances columns: [closest gap (days), 2nd-closest gap, year, month];
# indexes columns: survey indices of the two closest surveys
distances = np.zeros((len(dn_sat),4)).astype('int32')
indexes = np.zeros((len(dn_sat),2)).astype('int32')
for i in range(len(dn_sat)):
    distances[i,0] = np.sort(abs(dn_sat[i] - dn_sur))[0]
    distances[i,1] = np.sort(abs(dn_sat[i] - dn_sur))[1]
    distances[i,2] = dt_sat[i].year
    distances[i,3] = dt_sat[i].month
    indexes[i,0] = np.where(abs(dn_sat[i] - dn_sur) == np.sort(abs(dn_sat[i] - dn_sur))[0])[0][0]
    indexes[i,1] = np.where(abs(dn_sat[i] - dn_sur) == np.sort(abs(dn_sat[i] - dn_sur))[1])[0][0]


# plot satellite acquisition dates, one subplot per year
years = [2013, 2014, 2015, 2016]
months = mdates.MonthLocator()
days = mdates.DayLocator()
month_fmt = mdates.DateFormatter('%b')
f, ax = plt.subplots(4, 1)
for i, ca in enumerate(ax):
    ca.xaxis.set_major_locator(months)
    ca.xaxis.set_major_formatter(month_fmt)
    ca.xaxis.set_minor_locator(days)
    ca.set_ylabel(str(years[i]))
    for j in range(len(dt_sat)):
        if dt_sat[j].year == years[i]:
            ca.plot(dt_sat[j],0, 'bo', markerfacecolor='b')
#f.subplots_adjust(hspace=0)
#plt.setp([a.get_xticklabels() for a in f.axes[:-1]], visible=False)


# overview plot: survey dates (y=0) vs satellite dates (y=1)
plt.plot(dt_survey, np.zeros([len(dt_survey),1]), 'bo')
plt.plot(dt_sat, np.ones([len(dt_sat),1]), 'ro')
plt.yticks([])
plt.show()
|
||||
|
@ -0,0 +1,75 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Created on Tue Mar 27 17:12:35 2018
|
||||
|
||||
@author: Kilian
|
||||
"""
|
||||
|
||||
# Initial settings
|
||||
import os
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
import ee
|
||||
import pdb
|
||||
|
||||
# other modules
|
||||
from osgeo import gdal, ogr, osr
|
||||
import pickle
|
||||
import matplotlib.cm as cm
|
||||
from pylab import ginput
|
||||
|
||||
# image processing modules
|
||||
import skimage.filters as filters
|
||||
import skimage.exposure as exposure
|
||||
import skimage.transform as transform
|
||||
import sklearn.decomposition as decomposition
|
||||
import skimage.measure as measure
|
||||
|
||||
|
||||
# import own modules
|
||||
import functions.utils as utils
|
||||
import functions.sds as sds
|
||||
|
||||
np.seterr(all='ignore') # raise/ignore divisions by 0 and nans
plt.rcParams['axes.grid'] = True
plt.rcParams['figure.max_open_warning'] = 100
ee.Initialize()  # connect/authenticate to the Google Earth Engine servers

# initial settings
cloud_thresh = 0.5          # threshold for cloud cover
plot_bool = False           # if you want the plots
prob_high = 99.9            # upper probability to clip and rescale pixel intensity
min_contour_points = 100    # minimum number of points contained in each water line
output_epsg = 28356         # GDA94 / MGA Zone 56

# metadata written by the download script: acquisition timestamps and input EPSG code
satname = 'L7'
sitename = 'NARRA'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    timestamps = pickle.load(f)
timestamps_sorted = sorted(timestamps)  # NOTE(review): unused in the visible code -- confirm
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'rb') as f:
    input_epsg = pickle.load(f)

# paired pan/ms GeoTIFF listings;
# NOTE(review): pairing relies on os.listdir order of both directories matching -- verify
file_path_pan = os.path.join(filepath, 'pan')
file_path_ms = os.path.join(filepath, 'ms')
file_names_pan = os.listdir(file_path_pan)
file_names_ms = os.listdir(file_path_ms)
N = len(file_names_pan)
# accumulators for the processing loop below
idx_high_cloud = []
t = []
shorelines = []
|
||||
|
||||
for i in range(N):
|
||||
# read pan image
|
||||
fn_pan = os.path.join(file_path_pan, file_names_pan[i])
|
||||
data = gdal.Open(fn_pan, gdal.GA_ReadOnly)
|
||||
georef = np.array(data.GetGeoTransform())
|
||||
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
|
||||
im_pan = np.stack(bands, 2)[:,:,0]
|
||||
# read ms image
|
||||
fn_ms = os.path.join(file_path_ms, file_names_ms[i])
|
||||
data = gdal.Open(fn_ms, gdal.GA_ReadOnly)
|
||||
bands = [data.GetRasterBand(i + 1).ReadAsArray() for i in range(data.RasterCount)]
|
||||
im_ms = np.stack(bands, 2)
|
||||
|
Loading…
Reference in New Issue