# -*- coding: utf-8 -*-

#==========================================================#
# Download L8 images of a given area between given dates
#==========================================================#

# Initial settings
import os
import numpy as np
import matplotlib.pyplot as plt
import pdb
import ee

# other modules
from osgeo import gdal, ogr, osr
from urllib.request import urlretrieve
import zipfile
from datetime import datetime
import pytz
import pickle

# image processing modules
import skimage.filters as filters
import skimage.exposure as exposure
import skimage.transform as transform
import sklearn.decomposition as decomposition
import skimage.measure as measure

# import own modules
import functions.utils as utils

np.seterr(all='ignore')  # raise/ignore divisions by 0 and nans
ee.Initialize()


def download_tif(image, polygon, bandsId, filepath):
    """Downloads a .tif image (clipped to the given region and bands) from the
    GEE server and stores it in a temporary file."""
    url = ee.data.makeDownloadUrl(ee.data.getDownloadId({
        'image': image.serialize(),
        'region': polygon,
        'bands': bandsId,
        'filePerBand': 'false',
        'name': 'data',
        }))
    local_zip, headers = urlretrieve(url)
    with zipfile.ZipFile(local_zip) as local_zipfile:
        return local_zipfile.extract('data.tif', filepath)


# select collection (Landsat 8 Collection 1 Tier 1 + Real-Time TOA reflectance)
input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')

# Location (Narrabeen all)
#polygon = [[[151.3473129272461, -33.69035274454718],
#            [151.2820816040039, -33.68206818063878],
#            [151.27281188964844, -33.74775138989556],
#            [151.3425064086914, -33.75231878701767],
#            [151.3473129272461, -33.69035274454718]]]

# Location (Narrabeen-Collaroy beach)
#polygon = [[[151.301454, -33.700754],
#            [151.311453, -33.702075],
#            [151.307237, -33.739761],
#            [151.294220, -33.736329],
#            [151.301454, -33.700754]]]

# Location (Oldbar beach)
#polygon = [[[152.664508, -31.896163],
#            [152.665827, -31.897112],
#            [152.631516, -31.924846],
#            [152.629285, -31.923362],
#            [152.664508, -31.896163]]]

# Location (Oldbar inlet)
#polygon = [[[152.676283, -31.866784],
#            [152.709174, -31.869993],
#            [152.678229, -31.892082],
#            [152.670366, -31.886360],
#            [152.676283, -31.866784]]]

# Location (Sand Engine)
#polygon = [[[4.171742, 52.070455],
#            [4.223708, 52.069576],
#            [4.220808, 52.025293],
#            [4.147749, 52.028861],
#            [4.171742, 52.070455]]]

# Location (Tairua)
#polygon = [[[175.852115, -36.985414],
#            [175.872797, -36.985145],
#            [175.873738, -37.000039],
#            [175.853956, -36.998749],
#            [175.852115, -36.985414]]]

# Location (Duck)
polygon = [[[-75.766220, 36.195928],
            [-75.748282, 36.196401],
            [-75.738851, 36.173974],
            [-75.763546, 36.174249],
            [-75.766220, 36.195928]]]

# dates
start_date = '2013-01-01'
end_date = '2019-01-01'

# filter collection by location and dates
flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon)).filterDate(start_date, end_date)
n_img = flt_col.size().getInfo()
print('Number of images covering the area:', n_img)
im_all = flt_col.getInfo().get('features')

satname = 'L8'
#sitename = 'NARRA_all'
#sitename = 'NARRA'
#sitename = 'OLDBAR'
#sitename = 'SANDMOTOR'
#sitename = 'TAIRUA'
sitename = 'DUCK'
suffix = '.tif'
filepath = os.path.join(os.getcwd(), 'data', satname, sitename)
filepath_pan = os.path.join(filepath, 'pan')
filepath_ms = os.path.join(filepath, 'ms')
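# The loop below writes into the 'pan' and 'ms' subfolders; this small guard
# (an addition to the original script, using only the standard library) creates
# the output folders if they do not already exist, so that the os.rename calls
# further down cannot fail on a missing directory.
for fp in [filepath, filepath_pan, filepath_ms]:
    if not os.path.exists(fp):
        os.makedirs(fp)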
all_names_pan = []
all_names_ms = []
timestamps = []
acc_georef = []

# loop through all images
for i in range(n_img):

    # find each image in ee database
    im = ee.Image(im_all[i].get('id'))
    im_dic = im.getInfo()
    im_bands = im_dic.get('bands')
    im_date = im_dic['properties']['DATE_ACQUIRED']

    # acquisition time (milliseconds since epoch) converted to a UTC datetime
    t = im_dic['properties']['system:time_start']
    im_timestamp = datetime.fromtimestamp(t/1000, tz=pytz.utc)
    timestamps.append(im_timestamp)

    # EPSG code of the image projection (strip the 'EPSG:' prefix)
    im_epsg = int(im_dic['bands'][0]['crs'][5:])

    # geometric accuracy of the georeferencing (fall back to 10 m when the metadata field is missing)
    try:
        acc_georef.append(im_dic['properties']['GEOMETRIC_RMSE_MODEL'])
    except KeyError:
        acc_georef.append(10)
        print('No geometric rmse model property')

    # delete dimensions key from the band dictionaries, otherwise the entire image is extracted
    for j in range(len(im_bands)):
        del im_bands[j]['dimensions']

    # panchromatic band (B8) and multispectral bands (B2-B6: blue, green, red, NIR, SWIR1) + QA band
    pan_band = [im_bands[7]]
    ms_bands = [im_bands[1], im_bands[2], im_bands[3], im_bands[4], im_bands[5], im_bands[11]]

    # filenames; append '_r' when an image with the same date has already been downloaded
    filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + suffix
    filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + suffix
    print(i)
    if any(filename_pan in _ for _ in all_names_pan):
        filename_pan = satname + '_' + sitename + '_' + im_date + '_pan' + '_r' + suffix
        filename_ms = satname + '_' + sitename + '_' + im_date + '_ms' + '_r' + suffix
    all_names_pan.append(filename_pan)

    # download the pan and ms geotiffs and move them to their final filenames
    local_data_pan = download_tif(im, polygon, pan_band, filepath_pan)
    os.rename(local_data_pan, os.path.join(filepath_pan, filename_pan))
    local_data_ms = download_tif(im, polygon, ms_bands, filepath_ms)
    os.rename(local_data_ms, os.path.join(filepath_ms, filename_ms))

# save the metadata (timestamps, EPSG code and georeferencing accuracy)
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'wb') as f:
    pickle.dump(timestamps, f)
with open(os.path.join(filepath, sitename + '_epsgcode' + '.pkl'), 'wb') as f:
    pickle.dump(im_epsg, f)
with open(os.path.join(filepath, sitename + '_accuracy_georef' + '.pkl'), 'wb') as f:
    pickle.dump(acc_georef, f)
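# Illustrative check (an addition to the original script, not part of the
# original workflow): reload one of the pickled files to confirm the metadata
# was written and is readable; later processing scripts can load it the same way.
with open(os.path.join(filepath, sitename + '_timestamps' + '.pkl'), 'rb') as f:
    saved_timestamps = pickle.load(f)
print('Saved metadata for', len(saved_timestamps), 'images in', filepath)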