major updates
parent
2e3b90316f
commit
a0b49c7dcf
@ -0,0 +1,62 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<kml xmlns="http://www.opengis.net/kml/2.2">
|
||||||
|
<Document>
|
||||||
|
<name>NARRA</name>
|
||||||
|
<Style id="poly-000000-1200-77-nodesc-normal">
|
||||||
|
<LineStyle>
|
||||||
|
<color>ff000000</color>
|
||||||
|
<width>1.2</width>
|
||||||
|
</LineStyle>
|
||||||
|
<PolyStyle>
|
||||||
|
<color>4d000000</color>
|
||||||
|
<fill>1</fill>
|
||||||
|
<outline>1</outline>
|
||||||
|
</PolyStyle>
|
||||||
|
<BalloonStyle>
|
||||||
|
<text><![CDATA[<h3>$[name]</h3>]]></text>
|
||||||
|
</BalloonStyle>
|
||||||
|
</Style>
|
||||||
|
<Style id="poly-000000-1200-77-nodesc-highlight">
|
||||||
|
<LineStyle>
|
||||||
|
<color>ff000000</color>
|
||||||
|
<width>1.8</width>
|
||||||
|
</LineStyle>
|
||||||
|
<PolyStyle>
|
||||||
|
<color>4d000000</color>
|
||||||
|
<fill>1</fill>
|
||||||
|
<outline>1</outline>
|
||||||
|
</PolyStyle>
|
||||||
|
<BalloonStyle>
|
||||||
|
<text><![CDATA[<h3>$[name]</h3>]]></text>
|
||||||
|
</BalloonStyle>
|
||||||
|
</Style>
|
||||||
|
<StyleMap id="poly-000000-1200-77-nodesc">
|
||||||
|
<Pair>
|
||||||
|
<key>normal</key>
|
||||||
|
<styleUrl>#poly-000000-1200-77-nodesc-normal</styleUrl>
|
||||||
|
</Pair>
|
||||||
|
<Pair>
|
||||||
|
<key>highlight</key>
|
||||||
|
<styleUrl>#poly-000000-1200-77-nodesc-highlight</styleUrl>
|
||||||
|
</Pair>
|
||||||
|
</StyleMap>
|
||||||
|
<Placemark>
|
||||||
|
<name>Polygon 1</name>
|
||||||
|
<styleUrl>#poly-000000-1200-77-nodesc</styleUrl>
|
||||||
|
<Polygon>
|
||||||
|
<outerBoundaryIs>
|
||||||
|
<LinearRing>
|
||||||
|
<tessellate>1</tessellate>
|
||||||
|
<coordinates>
|
||||||
|
151.2957545,-33.7012561,0
|
||||||
|
151.297557,-33.7388075,0
|
||||||
|
151.312234,-33.7390216,0
|
||||||
|
151.311204,-33.701399,0
|
||||||
|
151.2957545,-33.7012561,0
|
||||||
|
</coordinates>
|
||||||
|
</LinearRing>
|
||||||
|
</outerBoundaryIs>
|
||||||
|
</Polygon>
|
||||||
|
</Placemark>
|
||||||
|
</Document>
|
||||||
|
</kml>
|
@ -0,0 +1,540 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
###############################################################################
|
||||||
|
# $Id$
|
||||||
|
#
|
||||||
|
# Project: InSAR Peppers
|
||||||
|
# Purpose: Module to extract data from many rasters into one output.
|
||||||
|
# Author: Frank Warmerdam, warmerdam@pobox.com
|
||||||
|
#
|
||||||
|
###############################################################################
|
||||||
|
# Copyright (c) 2000, Atlantis Scientific Inc. (www.atlsci.com)
|
||||||
|
# Copyright (c) 2009-2011, Even Rouault <even dot rouault at mines-paris dot org>
|
||||||
|
#
|
||||||
|
# This library is free software; you can redistribute it and/or
|
||||||
|
# modify it under the terms of the GNU Library General Public
|
||||||
|
# License as published by the Free Software Foundation; either
|
||||||
|
# version 2 of the License, or (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This library is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
# Library General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Library General Public
|
||||||
|
# License along with this library; if not, write to the
|
||||||
|
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
|
||||||
|
# Boston, MA 02111-1307, USA.
|
||||||
|
###############################################################################
|
||||||
|
# changes 29Apr2011
|
||||||
|
# If the input image is a multi-band one, use all the channels in
|
||||||
|
# building the stack.
|
||||||
|
# anssi.pekkarinen@fao.org
|
||||||
|
|
||||||
|
import math
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
from osgeo import gdal
|
||||||
|
|
||||||
|
try:
|
||||||
|
progress = gdal.TermProgress_nocb
|
||||||
|
except:
|
||||||
|
progress = gdal.TermProgress
|
||||||
|
|
||||||
|
__version__ = '$id$'[5:-1]
|
||||||
|
verbose = 0
|
||||||
|
quiet = 0
|
||||||
|
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
def raster_copy( s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n,
                 t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n,
                 nodata=None ):
    """
    Copy a window of one raster band into a window of another.

    s_fh, t_fh -- source and target gdal.Dataset objects.
    s_xoff, s_yoff, s_xsize, s_ysize -- source window in pixels.
    t_xoff, t_yoff, t_xsize, t_ysize -- target window in pixels; when the
        sizes differ from the source window GDAL resamples on read.
    s_band_n, t_band_n -- 1-based band numbers to copy from/to.
    nodata -- if not None, source pixels equal to this value are not copied.

    Returns 0.
    """

    if verbose != 0:
        print('Copy %d,%d,%d,%d to %d,%d,%d,%d.'
              % (s_xoff, s_yoff, s_xsize, s_ysize,
                 t_xoff, t_yoff, t_xsize, t_ysize ))

    # Nodata handling takes precedence over mask/alpha compositing.
    if nodata is not None:
        return raster_copy_with_nodata(
            s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n,
            t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n,
            nodata )

    s_band = s_fh.GetRasterBand( s_band_n )
    m_band = None
    # Works only in binary mode and doesn't take into account
    # intermediate transparency values for compositing.
    if s_band.GetMaskFlags() != gdal.GMF_ALL_VALID:
        m_band = s_band.GetMaskBand()
    elif s_band.GetColorInterpretation() == gdal.GCI_AlphaBand:
        m_band = s_band
    if m_band is not None:
        return raster_copy_with_mask(
            s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n,
            t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n,
            m_band )

    # Plain copy path.  NOTE: the original fetched s_band a second time
    # here; the band object obtained above is reused instead.
    t_band = t_fh.GetRasterBand( t_band_n )

    data = s_band.ReadRaster( s_xoff, s_yoff, s_xsize, s_ysize,
                              t_xsize, t_ysize, t_band.DataType )
    t_band.WriteRaster( t_xoff, t_yoff, t_xsize, t_ysize,
                        data, t_xsize, t_ysize, t_band.DataType )

    return 0
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
def raster_copy_with_nodata( s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n,
                             t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n,
                             nodata ):
    """Copy a source window into the target window, leaving target pixels
    untouched wherever the (resampled) source equals *nodata*.

    Returns 0.
    """
    try:
        import numpy as Numeric
    except ImportError:
        import Numeric

    src_band = s_fh.GetRasterBand( s_band_n )
    dst_band = t_fh.GetRasterBand( t_band_n )

    # Read the source window resampled to the target window size, plus the
    # current contents of the target window.
    src_data = src_band.ReadAsArray( s_xoff, s_yoff, s_xsize, s_ysize,
                                     t_xsize, t_ysize )
    dst_data = dst_band.ReadAsArray( t_xoff, t_yoff, t_xsize, t_ysize )

    # Where the source is nodata keep the existing target value; elsewhere
    # take the source value.
    is_nodata = Numeric.equal(src_data, nodata)
    merged = Numeric.choose( is_nodata, (src_data, dst_data) )

    dst_band.WriteArray( merged, t_xoff, t_yoff )

    return 0
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
def raster_copy_with_mask( s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n,
                           t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n,
                           m_band ):
    """Copy a source window into the target window, leaving target pixels
    untouched wherever the (resampled) mask band *m_band* is zero.

    Returns 0.
    """
    try:
        import numpy as Numeric
    except ImportError:
        import Numeric

    src_band = s_fh.GetRasterBand( s_band_n )
    dst_band = t_fh.GetRasterBand( t_band_n )

    # Source data and mask are both read over the source window, resampled
    # to the target window size; target contents read for compositing.
    src_data = src_band.ReadAsArray( s_xoff, s_yoff, s_xsize, s_ysize,
                                     t_xsize, t_ysize )
    mask_data = m_band.ReadAsArray( s_xoff, s_yoff, s_xsize, s_ysize,
                                    t_xsize, t_ysize )
    dst_data = dst_band.ReadAsArray( t_xoff, t_yoff, t_xsize, t_ysize )

    # A zero mask value means "masked out": keep the target pixel there.
    masked_out = Numeric.equal(mask_data, 0)
    merged = Numeric.choose( masked_out, (src_data, dst_data) )

    dst_band.WriteArray( merged, t_xoff, t_yoff )

    return 0
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
def names_to_fileinfos( names ):
    """
    Translate a list of GDAL filenames, into file_info objects.

    names -- list of valid GDAL dataset names.

    Returns a list of file_info objects.  There may be less file_info objects
    than names if some of the names could not be opened as GDAL files.
    """

    infos = []
    for dataset_name in names:
        candidate = file_info()
        # init_from_name() returns 1 on success, 0 for unopenable files,
        # which are silently dropped.
        if candidate.init_from_name( dataset_name ) == 1:
            infos.append( candidate )
    return infos
|
||||||
|
|
||||||
|
# *****************************************************************************
|
||||||
|
class file_info:
    """A class holding information about a GDAL file."""

    def init_from_name(self, filename):
        """
        Initialize file_info from filename

        filename -- Name of file to read.

        Returns 1 on success or 0 if the file can't be opened.
        """
        fh = gdal.Open( filename )
        if fh is None:
            return 0

        # Cache the metadata needed for mosaicking so the dataset handle
        # can be dropped; copy_into() reopens the file by name later.
        self.filename = filename
        self.bands = fh.RasterCount
        self.xsize = fh.RasterXSize
        self.ysize = fh.RasterYSize
        self.band_type = fh.GetRasterBand(1).DataType
        self.projection = fh.GetProjection()
        self.geotransform = fh.GetGeoTransform()
        # Georeferenced corner coordinates derived from the geotransform.
        # geotransform[5] (the y pixel size) is negative for north-up
        # images, so lry ends up south of uly in that case.
        self.ulx = self.geotransform[0]
        self.uly = self.geotransform[3]
        self.lrx = self.ulx + self.geotransform[1] * self.xsize
        self.lry = self.uly + self.geotransform[5] * self.ysize

        ct = fh.GetRasterBand(1).GetRasterColorTable()
        if ct is not None:
            # Clone the color table so it stays valid after fh is released.
            self.ct = ct.Clone()
        else:
            self.ct = None

        return 1

    def report( self ):
        """Print filename, size, pixel size and extent to stdout."""
        print('Filename: '+ self.filename)
        print('File Size: %dx%dx%d'
              % (self.xsize, self.ysize, self.bands))
        print('Pixel Size: %f x %f'
              % (self.geotransform[1],self.geotransform[5]))
        print('UL:(%f,%f) LR:(%f,%f)'
              % (self.ulx,self.uly,self.lrx,self.lry))

    def copy_into( self, t_fh, s_band = 1, t_band = 1, nodata_arg=None ):
        """
        Copy this files image into target file.

        This method will compute the overlap area of the file_info objects
        file, and the target gdal.Dataset object, and copy the image data
        for the common window area.  It is assumed that the files are in
        a compatible projection ... no checking or warping is done.  However,
        if the destination file is a different resolution, or different
        image pixel type, the appropriate resampling and conversions will
        be done (using normal GDAL promotion/demotion rules).

        t_fh -- gdal.Dataset object for the file into which some or all
        of this file may be copied.

        Returns 1 on success (or if nothing needs to be copied), and zero on
        failure.
        """
        t_geotransform = t_fh.GetGeoTransform()
        t_ulx = t_geotransform[0]
        t_uly = t_geotransform[3]
        t_lrx = t_geotransform[0] + t_fh.RasterXSize * t_geotransform[1]
        t_lry = t_geotransform[3] + t_fh.RasterYSize * t_geotransform[5]

        # figure out intersection region
        tgw_ulx = max(t_ulx,self.ulx)
        tgw_lrx = min(t_lrx,self.lrx)
        # The vertical min/max roles flip with the sign of the y pixel size.
        if t_geotransform[5] < 0:
            tgw_uly = min(t_uly,self.uly)
            tgw_lry = max(t_lry,self.lry)
        else:
            tgw_uly = max(t_uly,self.uly)
            tgw_lry = min(t_lry,self.lry)

        # do they even intersect?  (no overlap is not an error)
        if tgw_ulx >= tgw_lrx:
            return 1
        if t_geotransform[5] < 0 and tgw_uly <= tgw_lry:
            return 1
        if t_geotransform[5] > 0 and tgw_uly >= tgw_lry:
            return 1

        # compute target window in pixel coordinates.
        # +0.1 / +0.5 bias guards against float truncation just below a
        # whole pixel boundary.
        tw_xoff = int((tgw_ulx - t_geotransform[0]) / t_geotransform[1] + 0.1)
        tw_yoff = int((tgw_uly - t_geotransform[3]) / t_geotransform[5] + 0.1)
        tw_xsize = int((tgw_lrx - t_geotransform[0])/t_geotransform[1] + 0.5) \
                   - tw_xoff
        tw_ysize = int((tgw_lry - t_geotransform[3])/t_geotransform[5] + 0.5) \
                   - tw_yoff

        if tw_xsize < 1 or tw_ysize < 1:
            return 1

        # Compute source window in pixel coordinates.
        sw_xoff = int((tgw_ulx - self.geotransform[0]) / self.geotransform[1])
        sw_yoff = int((tgw_uly - self.geotransform[3]) / self.geotransform[5])
        sw_xsize = int((tgw_lrx - self.geotransform[0]) \
                       / self.geotransform[1] + 0.5) - sw_xoff
        sw_ysize = int((tgw_lry - self.geotransform[3]) \
                       / self.geotransform[5] + 0.5) - sw_yoff

        if sw_xsize < 1 or sw_ysize < 1:
            return 1

        # Open the source file, and copy the selected region.
        s_fh = gdal.Open( self.filename )

        return raster_copy( s_fh, sw_xoff, sw_yoff, sw_xsize, sw_ysize, s_band,
                            t_fh, tw_xoff, tw_yoff, tw_xsize, tw_ysize, t_band,
                            nodata_arg )
|
||||||
|
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
def Usage():
    """Print the gdal_merge command line usage message to stdout."""
    usage_lines = (
        'Usage: gdal_merge.py [-o out_filename] [-of out_format] [-co NAME=VALUE]*',
        ' [-ps pixelsize_x pixelsize_y] [-tap] [-separate] [-q] [-v] [-pct]',
        ' [-ul_lr ulx uly lrx lry] [-init "value [value...]"]',
        ' [-n nodata_value] [-a_nodata output_nodata_value]',
        ' [-ot datatype] [-createonly] input_files',
        ' [--help-general]',
        '',
    )
    for usage_line in usage_lines:
        print(usage_line)
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
#
|
||||||
|
# Program mainline.
|
||||||
|
#
|
||||||
|
|
||||||
|
def main( argv=None ):
    """Command line entry point for gdal_merge.

    argv -- argument list; defaults to sys.argv.  Exits the process with
    status 1 on usage or driver errors.
    """

    global verbose, quiet
    verbose = 0
    quiet = 0
    names = []
    format = 'GTiff'
    out_file = 'out.tif'

    ulx = None
    psize_x = None
    separate = 0
    copy_pct = 0
    nodata = None
    a_nodata = None
    create_options = []
    pre_init = []
    band_type = None
    createonly = 0
    bTargetAlignedPixels = False
    start_time = time.time()

    gdal.AllRegister()
    if argv is None:
        argv = sys.argv
    # Lets GDAL consume general options (e.g. --version); may return None.
    argv = gdal.GeneralCmdLineProcessor( argv )
    if argv is None:
        sys.exit( 0 )

    # Parse command line arguments.
    i = 1
    while i < len(argv):
        arg = argv[i]

        if arg == '-o':
            i = i + 1
            out_file = argv[i]

        elif arg == '-v':
            verbose = 1

        elif arg == '-q' or arg == '-quiet':
            quiet = 1

        elif arg == '-createonly':
            createonly = 1

        elif arg == '-separate':
            separate = 1

        # common misspelling accepted for backward compatibility
        elif arg == '-seperate':
            separate = 1

        elif arg == '-pct':
            copy_pct = 1

        elif arg == '-ot':
            i = i + 1
            band_type = gdal.GetDataTypeByName( argv[i] )
            if band_type == gdal.GDT_Unknown:
                print('Unknown GDAL data type: %s' % argv[i])
                sys.exit( 1 )

        elif arg == '-init':
            i = i + 1
            # -init takes a single quoted argument holding one value per band
            str_pre_init = argv[i].split()
            for x in str_pre_init:
                pre_init.append(float(x))

        elif arg == '-n':
            i = i + 1
            nodata = float(argv[i])

        elif arg == '-a_nodata':
            i = i + 1
            a_nodata = float(argv[i])

        elif arg == '-f':
            # for backward compatibility.
            i = i + 1
            format = argv[i]

        elif arg == '-of':
            i = i + 1
            format = argv[i]

        elif arg == '-co':
            i = i + 1
            create_options.append( argv[i] )

        elif arg == '-ps':
            psize_x = float(argv[i+1])
            # y pixel size is forced negative (north-up convention)
            psize_y = -1 * abs(float(argv[i+2]))
            i = i + 2

        elif arg == '-tap':
            bTargetAlignedPixels = True

        elif arg == '-ul_lr':
            ulx = float(argv[i+1])
            uly = float(argv[i+2])
            lrx = float(argv[i+3])
            lry = float(argv[i+4])
            i = i + 4

        elif arg[:1] == '-':
            print('Unrecognized command option: %s' % arg)
            Usage()
            sys.exit( 1 )

        else:
            # anything that is not an option is an input file name
            names.append(arg)

        i = i + 1

    if len(names) == 0:
        print('No input files selected.')
        Usage()
        sys.exit( 1 )

    Driver = gdal.GetDriverByName(format)
    if Driver is None:
        print('Format driver %s not found, pick a supported driver.' % format)
        sys.exit( 1 )

    DriverMD = Driver.GetMetadata()
    if 'DCAP_CREATE' not in DriverMD:
        print('Format driver %s does not support creation and piecewise writing.\nPlease select a format that does, such as GTiff (the default) or HFA (Erdas Imagine).' % format)
        sys.exit( 1 )

    # Collect information on all the source files.
    file_infos = names_to_fileinfos( names )

    # Unless -ul_lr was given, the mosaic extent is the union of all inputs.
    if ulx is None:
        ulx = file_infos[0].ulx
        uly = file_infos[0].uly
        lrx = file_infos[0].lrx
        lry = file_infos[0].lry

        for fi in file_infos:
            ulx = min(ulx, fi.ulx)
            uly = max(uly, fi.uly)
            lrx = max(lrx, fi.lrx)
            lry = min(lry, fi.lry)

    if psize_x is None:
        psize_x = file_infos[0].geotransform[1]
        psize_y = file_infos[0].geotransform[5]

    if band_type is None:
        band_type = file_infos[0].band_type

    # Try opening as an existing file.
    gdal.PushErrorHandler( 'CPLQuietErrorHandler' )
    t_fh = gdal.Open( out_file, gdal.GA_Update )
    gdal.PopErrorHandler()

    # Create output file if it does not already exist.
    if t_fh is None:

        # -tap: snap the extent outward to multiples of the pixel size
        if bTargetAlignedPixels:
            ulx = math.floor(ulx / psize_x) * psize_x
            lrx = math.ceil(lrx / psize_x) * psize_x
            lry = math.floor(lry / -psize_y) * -psize_y
            uly = math.ceil(uly / -psize_y) * -psize_y

        geotransform = [ulx, psize_x, 0, uly, 0, psize_y]

        xsize = int((lrx - ulx) / geotransform[1] + 0.5)
        ysize = int((lry - uly) / geotransform[5] + 0.5)

        # -separate stacks every input band as its own output band
        if separate != 0:
            bands=0

            for fi in file_infos:
                bands=bands + fi.bands
        else:
            bands = file_infos[0].bands

        t_fh = Driver.Create( out_file, xsize, ysize, bands,
                              band_type, create_options )
        if t_fh is None:
            print('Creation failed, terminating gdal_merge.')
            sys.exit( 1 )

        t_fh.SetGeoTransform( geotransform )
        t_fh.SetProjection( file_infos[0].projection )

        if copy_pct:
            t_fh.GetRasterBand(1).SetRasterColorTable(file_infos[0].ct)
    else:
        # Existing output file: only validate that it has enough bands.
        if separate != 0:
            bands=0
            for fi in file_infos:
                bands=bands + fi.bands
            if t_fh.RasterCount < bands :
                print('Existing output file has less bands than the input files. You should delete it before. Terminating gdal_merge.')
                sys.exit( 1 )
        else:
            bands = min(file_infos[0].bands,t_fh.RasterCount)

    # Do we need to set nodata value ?
    if a_nodata is not None:
        for i in range(t_fh.RasterCount):
            t_fh.GetRasterBand(i+1).SetNoDataValue(a_nodata)

    # Do we need to pre-initialize the whole mosaic file to some value?
    # NOTE(review): pre_init starts as [] so this test is always true; the
    # inner length checks make an empty list a no-op.
    if pre_init is not None:
        if t_fh.RasterCount <= len(pre_init):
            for i in range(t_fh.RasterCount):
                t_fh.GetRasterBand(i+1).Fill( pre_init[i] )
        elif len(pre_init) == 1:
            # a single -init value is replicated across all bands
            for i in range(t_fh.RasterCount):
                t_fh.GetRasterBand(i+1).Fill( pre_init[0] )

    # Copy data from source files into output file.
    t_band = 1

    if quiet == 0 and verbose == 0:
        progress( 0.0 )
    fi_processed = 0

    for fi in file_infos:
        if createonly != 0:
            continue

        if verbose != 0:
            print("")
            print("Processing file %5d of %5d, %6.3f%% completed in %d minutes."
                  % (fi_processed+1,len(file_infos),
                     fi_processed * 100.0 / len(file_infos),
                     int(round((time.time() - start_time)/60.0)) ))
            fi.report()

        if separate == 0 :
            # merge mode: band N of every input goes to band N of the output
            for band in range(1, bands+1):
                fi.copy_into( t_fh, band, band, nodata )
        else:
            # separate mode: each input band gets its own output band
            for band in range(1, fi.bands+1):
                fi.copy_into( t_fh, band, t_band, nodata )
                t_band = t_band+1

        fi_processed = fi_processed+1
        if quiet == 0 and verbose == 0:
            progress( fi_processed / float(len(file_infos)) )

    # Force file to be closed.
    t_fh = None
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Run as a command line tool; propagate main()'s return code.
    sys.exit(main())
|
@ -0,0 +1,285 @@
|
|||||||
|
#==========================================================#
|
||||||
|
# Create a classifier for satellite images
|
||||||
|
#==========================================================#
|
||||||
|
|
||||||
|
# load modules
|
||||||
|
import os
|
||||||
|
import pickle
|
||||||
|
import warnings
|
||||||
|
import numpy as np
|
||||||
|
import matplotlib.cm as cm
|
||||||
|
warnings.filterwarnings("ignore")
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
from pylab import ginput
|
||||||
|
|
||||||
|
import SDS_download, SDS_preprocess, SDS_shoreline, SDS_tools, SDS_classification
|
||||||
|
|
||||||
|
# Build training data interactively: for every polygon (site) in the
# ./polygons folder, download Sentinel-2 images and let the user digitize
# sand / swash / water / land pixels by clicking on each image.
filepath_sites = os.path.join(os.getcwd(), 'polygons')
sites = os.listdir(filepath_sites)

for site in sites:

    polygon = SDS_tools.coords_from_kml(os.path.join(filepath_sites,site))

    # load Sentinel-2 images
    inputs = {
        'polygon': polygon,
        'dates': ['2016-10-01', '2016-11-01'],
        'sat_list': ['S2'],
        'sitename': site[:site.find('.')]  # filename up to the first dot
        }

    satname = inputs['sat_list'][0]

    # download, then discard images with more than 20% cloud cover
    metadata = SDS_download.get_images(inputs)
    metadata = SDS_download.remove_cloudy_images(metadata,inputs,0.2)
    filepath = os.path.join(os.getcwd(), 'data', inputs['sitename'])
    with open(os.path.join(filepath, inputs['sitename'] + '_metadata_' + satname + '.pkl'), 'wb') as f:
        pickle.dump(metadata, f)
    #with open(os.path.join(filepath, inputs['sitename'] + '_metadata_' + satname + '.pkl'), 'rb') as f:
    #    metadata = pickle.load(f)

    # settings needed to run the shoreline extraction
    settings = {

        # general parameters:
        'cloud_thresh': 0.1,        # threshold on maximum cloud cover
        'output_epsg': 28356,       # epsg code of spatial reference system desired for the output

        # shoreline detection parameters:
        'min_beach_size': 20,       # minimum number of connected pixels for a beach
        'buffer_size': 7,           # radius (in pixels) of disk for buffer around sandy pixels
        'min_length_sl': 200,       # minimum length of shoreline perimeter to be kept
        'max_dist_ref': 100,        # max distance (in meters) allowed from a reference shoreline

        # quality control:
        'check_detection': True,    # if True, shows each shoreline detection and lets the user
                                    # decide which ones are correct and which ones are false due to
                                    # the presence of clouds
        # also add the inputs
        'inputs': inputs
        }
    # preprocess images (cloud masking, pansharpening/down-sampling)
    SDS_preprocess.preprocess_all_images(metadata, settings)

    # one sub-dict per class, keyed later by image filename
    training_data = dict([])
    training_data['sand'] = dict([])
    training_data['swash'] = dict([])
    training_data['water'] = dict([])
    training_data['land'] = dict([])

    # read images
    filepath = SDS_tools.get_filepath(inputs,satname)
    filenames = metadata[satname]['filenames']

    for i in range(len(filenames)):

        fn = SDS_tools.get_filenames(filenames[i],filepath,satname)
        im_ms, georef, cloud_mask, im20, imQA = SDS_preprocess.preprocess_single(fn,satname)

        nrow = im_ms.shape[0]
        ncol = im_ms.shape[1]

        # display an RGB composite for digitization
        im_RGB = SDS_preprocess.rescale_image_intensity(im_ms[:,:,[2,1,0]], cloud_mask, 99.9)
        plt.figure()
        mng = plt.get_current_fig_manager()
        mng.window.showMaximized()
        plt.imshow(im_RGB)
        plt.axis('off')

        # Digitize sandy pixels
        plt.title('Digitize SAND pixels', fontweight='bold', fontsize=15)
        pt = ginput(n=1000, timeout=100000, show_clicks=True)

        if len(pt) > 0:
            # ginput returns (x, y) pairs: column first, row second
            pt = np.round(pt).astype(int)
            im_sand = np.zeros((nrow,ncol))
            for k in range(len(pt)):
                im_sand[pt[k,1],pt[k,0]] = 1
                # paint digitized sand pixels red in the display image
                im_RGB[pt[k,1],pt[k,0],0] = 1
                im_RGB[pt[k,1],pt[k,0],1] = 0
                im_RGB[pt[k,1],pt[k,0],2] = 0
            im_sand = im_sand.astype(bool)
            features = SDS_classification.calculate_features(im_ms, cloud_mask, im_sand)
        else:
            im_sand = np.zeros((nrow,ncol)).astype(bool)
            features = []
        training_data['sand'][filenames[i]] = {'pixels':im_sand,'features':features}

        # Digitize swash pixels
        plt.title('Digitize SWASH pixels', fontweight='bold', fontsize=15)
        plt.draw()
        pt = ginput(n=1000, timeout=100000, show_clicks=True)

        if len(pt) > 0:
            pt = np.round(pt).astype(int)
            im_swash = np.zeros((nrow,ncol))
            for k in range(len(pt)):
                im_swash[pt[k,1],pt[k,0]] = 1
                # paint digitized swash pixels green in the display image
                im_RGB[pt[k,1],pt[k,0],0] = 0
                im_RGB[pt[k,1],pt[k,0],1] = 1
                im_RGB[pt[k,1],pt[k,0],2] = 0
            im_swash = im_swash.astype(bool)
            features = SDS_classification.calculate_features(im_ms, cloud_mask, im_swash)
        else:
            im_swash = np.zeros((nrow,ncol)).astype(bool)
            features = []
        training_data['swash'][filenames[i]] = {'pixels':im_swash,'features':features}

        # Digitize rectangle containing water pixels (two corner clicks)
        plt.title('Click 2 points to draw a rectange in the WATER', fontweight='bold', fontsize=15)
        plt.draw()
        pt = ginput(n=2, timeout=100000, show_clicks=True)
        if len(pt) > 0:
            pt = np.round(pt).astype(int)
            # expand the two corners into a full grid of pixel indices
            idx_row = np.arange(np.min(pt[:,1]),np.max(pt[:,1])+1,1)
            idx_col = np.arange(np.min(pt[:,0]),np.max(pt[:,0])+1,1)
            xx, yy = np.meshgrid(idx_row,idx_col, indexing='ij')
            rows = xx.reshape(xx.shape[0]*xx.shape[1])
            cols = yy.reshape(yy.shape[0]*yy.shape[1])
            im_water = np.zeros((nrow,ncol)).astype(bool)
            for k in range(len(rows)):
                im_water[rows[k],cols[k]] = 1
                # paint the water rectangle blue in the display image
                im_RGB[rows[k],cols[k],0] = 0
                im_RGB[rows[k],cols[k],1] = 0
                im_RGB[rows[k],cols[k],2] = 1
            im_water = im_water.astype(bool)
            features = SDS_classification.calculate_features(im_ms, cloud_mask, im_water)
        else:
            im_water = np.zeros((nrow,ncol)).astype(bool)
            features = []
        training_data['water'][filenames[i]] = {'pixels':im_water,'features':features}

        # Digitize rectangle containing land pixels (two corner clicks)
        plt.title('Click 2 points to draw a rectange in the LAND', fontweight='bold', fontsize=15)
        plt.draw()
        pt = ginput(n=2, timeout=100000, show_clicks=True)
        plt.close()
        if len(pt) > 0:
            pt = np.round(pt).astype(int)
            idx_row = np.arange(np.min(pt[:,1]),np.max(pt[:,1])+1,1)
            idx_col = np.arange(np.min(pt[:,0]),np.max(pt[:,0])+1,1)
            xx, yy = np.meshgrid(idx_row,idx_col, indexing='ij')
            rows = xx.reshape(xx.shape[0]*xx.shape[1])
            cols = yy.reshape(yy.shape[0]*yy.shape[1])
            im_land = np.zeros((nrow,ncol)).astype(bool)
            for k in range(len(rows)):
                im_land[rows[k],cols[k]] = 1
                # paint the land rectangle yellow in the display image
                im_RGB[rows[k],cols[k],0] = 1
                im_RGB[rows[k],cols[k],1] = 1
                im_RGB[rows[k],cols[k],2] = 0
            im_land = im_land.astype(bool)
            features = SDS_classification.calculate_features(im_ms, cloud_mask, im_land)
        else:
            im_land = np.zeros((nrow,ncol)).astype(bool)
            features = []
        training_data['land'][filenames[i]] = {'pixels':im_land,'features':features}

        # show the image with all digitized classes painted in
        plt.figure()
        plt.title('Classified image')
        plt.imshow(im_RGB)

    # save training data for each site
    filepath = os.path.join(os.getcwd(), 'data', inputs['sitename'])
    with open(os.path.join(filepath, inputs['sitename'] + '_training_' + satname + '.pkl'), 'wb') as f:
        pickle.dump(training_data, f)
|
||||||
|
#%%
|
||||||
|
|
||||||
|
## load Landsat 5 images
|
||||||
|
#inputs = {
|
||||||
|
# 'polygon': polygon,
|
||||||
|
# 'dates': ['1987-01-01', '1988-01-01'],
|
||||||
|
# 'sat_list': ['L5'],
|
||||||
|
# 'sitename': site[:site.find('.')]
|
||||||
|
# }
|
||||||
|
#metadata = SDS_download.get_images(inputs)
|
||||||
|
#
|
||||||
|
## load Landsat 7 images
|
||||||
|
#inputs = {
|
||||||
|
# 'polygon': polygon,
|
||||||
|
# 'dates': ['2001-01-01', '2002-01-01'],
|
||||||
|
# 'sat_list': ['L7'],
|
||||||
|
# 'sitename': site[:site.find('.')]
|
||||||
|
# }
|
||||||
|
#metadata = SDS_download.get_images(inputs)
|
||||||
|
#
|
||||||
|
## load Landsat 8 images
|
||||||
|
#inputs = {
|
||||||
|
# 'polygon': polygon,
|
||||||
|
# 'dates': ['2014-01-01', '2015-01-01'],
|
||||||
|
# 'sat_list': ['L8'],
|
||||||
|
# 'sitename': site[:site.find('.')]
|
||||||
|
# }
|
||||||
|
#metadata = SDS_download.get_images(inputs)
|
||||||
|
|
||||||
|
|
||||||
|
#%% clean the Landsat collections
|
||||||
|
|
||||||
|
#import ee
|
||||||
|
#from datetime import datetime, timedelta
|
||||||
|
#import pytz
|
||||||
|
#import copy
|
||||||
|
#ee.Initialize()
|
||||||
|
#site = sites[0]
|
||||||
|
#dates = ['2017-12-01', '2017-12-25']
|
||||||
|
#polygon = SDS_tools.coords_from_kml(os.path.join(filepath_sites,site))
|
||||||
|
## Landsat collection
|
||||||
|
#input_col = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA')
|
||||||
|
## filter by location and dates
|
||||||
|
#flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon)).filterDate(inputs['dates'][0],inputs['dates'][1])
|
||||||
|
## get all images in the filtered collection
|
||||||
|
#im_all = flt_col.getInfo().get('features')
|
||||||
|
#cloud_cover = [_['properties']['CLOUD_COVER'] for _ in im_all]
|
||||||
|
#if np.any([_ > 90 for _ in cloud_cover]):
|
||||||
|
# idx_delete = np.where([_ > 90 for _ in cloud_cover])[0]
|
||||||
|
# im_all_cloud = [x for k,x in enumerate(im_all) if k not in idx_delete]
|
||||||
|
|
||||||
|
|
||||||
|
#%% clean the S2 collection
|
||||||
|
|
||||||
|
#import ee
|
||||||
|
#from datetime import datetime, timedelta
|
||||||
|
#import pytz
|
||||||
|
#import copy
|
||||||
|
#ee.Initialize()
|
||||||
|
## Sentinel2 collection
|
||||||
|
#input_col = ee.ImageCollection('COPERNICUS/S2')
|
||||||
|
## filter by location and dates
|
||||||
|
#flt_col = input_col.filterBounds(ee.Geometry.Polygon(polygon)).filterDate(inputs['dates'][0],inputs['dates'][1])
|
||||||
|
## get all images in the filtered collection
|
||||||
|
#im_all = flt_col.getInfo().get('features')
|
||||||
|
#
|
||||||
|
## remove duplicates (there are many in S2 collection)
|
||||||
|
## timestamps
|
||||||
|
#timestamps = [datetime.fromtimestamp(_['properties']['system:time_start']/1000, tz=pytz.utc) for _ in im_all]
|
||||||
|
## utm zones
|
||||||
|
#utm_zones = np.array([int(_['bands'][0]['crs'][5:]) for _ in im_all])
|
||||||
|
#utm_zone_selected = np.max(np.unique(utm_zones))
|
||||||
|
#idx_all = np.arange(0,len(im_all),1)
|
||||||
|
#idx_covered = np.ones(len(im_all)).astype(bool)
|
||||||
|
#idx_delete = []
|
||||||
|
#i = 0
|
||||||
|
#while 1:
|
||||||
|
# same_time = np.abs([(timestamps[i]-_).total_seconds() for _ in timestamps]) < 60*60*24
|
||||||
|
# idx_same_time = np.where(same_time)[0]
|
||||||
|
# same_utm = utm_zones == utm_zone_selected
|
||||||
|
# idx_temp = np.where([same_time[j] == True and same_utm[j] == False for j in idx_all])[0]
|
||||||
|
# idx_keep = idx_same_time[[_ not in idx_temp for _ in idx_same_time ]]
|
||||||
|
# if len(idx_keep) > 2: # if more than 2 images with same date and same utm, drop the last one
|
||||||
|
# idx_temp = np.append(idx_temp,idx_keep[-1])
|
||||||
|
# for j in idx_temp:
|
||||||
|
# idx_delete.append(j)
|
||||||
|
# idx_covered[idx_same_time] = False
|
||||||
|
# if np.any(idx_covered):
|
||||||
|
# i = np.where(idx_covered)[0][0]
|
||||||
|
# else:
|
||||||
|
# break
|
||||||
|
#im_all_updated = [x for k,x in enumerate(im_all) if k not in idx_delete]
|
||||||
|
#
|
||||||
|
## remove very cloudy images (>90% cloud)
|
||||||
|
#cloud_cover = [_['properties']['CLOUDY_PIXEL_PERCENTAGE'] for _ in im_all_updated]
|
||||||
|
#if np.any([_ > 90 for _ in cloud_cover]):
|
||||||
|
# idx_delete = np.where([_ > 90 for _ in cloud_cover])[0]
|
||||||
|
# im_all_cloud = [x for k,x in enumerate(im_all_updated) if k not in idx_delete]
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue