added download and registration
parent
c068d7e928
commit
a227b77ad3
@ -0,0 +1 @@
|
|||||||
|
"C:\Program Files\Adobe\Adobe Photoshop 2022\Photoshop.exe" -r "C:\Users\z5079346\OneDrive - UNSW\Projects\Coastsnap\coastsnap\photoshop_registration_all_sites.jsx"
|
@ -0,0 +1,318 @@
|
|||||||
|
// TO DO
// Create another script that can run
//   for site in sites:
//       for year in years:
//           register_images()

// VARIABLE DEFINITIONS
// images_to_register       = All images to be registered for a given site and year (no target or seed)
// batch_images_to_register = Batch images to be registered (no target or seed)
// batch_images_all         = Target, seed and batch images to be registered

// Number of images aligned per Photoshop document, to avoid reaching the
// image limit that causes Photoshop to crash (see batch_register_images).
var batch_size = 10;

// Must use forwardslashes in filepath, not backslashes
var batch_download_csv = File("C:/Users/z5079346/OneDrive - UNSW/Projects/Coastsnap_test/CoastSnap_Sites.csv")

// Retrieve site names from the sites CSV, skipping blank/near-empty lines.
var csv_data=[];
batch_download_csv.open('r');
while(!batch_download_csv.eof){
    var InputLine = batch_download_csv.readln();
    if(InputLine.length > 3) csv_data.push(InputLine);
}
batch_download_csv.close();
// Flatten all rows into a single comma-separated field list.
// NOTE(review): assumes the CSV has exactly 5 columns and that no field
// contains an embedded comma -- confirm against CoastSnap_Sites.csv.
var site_names = csv_data.toString().split(",")

// Images parent directory
var parent_folder_path = "C:/Users/z5079346/OneDrive - UNSW/Projects/Coastsnap_test/Images_Test";

var batch_images_to_register = []; // Used in exportLayersToPNG

// Loop through sites.
// i starts at 5 and steps by 5: with 5 columns per row this visits the first
// field of each data row, skipping the header row -- TODO confirm layout.
for(var i=5; i<site_names.length; i+=5) {
    var site_name = site_names[i];
    var site_path = parent_folder_path + "/" + site_name;

    // Retrieve target and seed images for the site
    var target_folder = new Folder(site_path + '/Target Image');
    var seed_folder = new Folder(site_path + '/Target Image' + '/Seed Images');
    var target_image = target_folder.getFiles("Target.jpg");
    var seed_images = seed_folder.getFiles("*.jpg");

    // Retrieve processed image years for the site
    var processed_folder = new Folder(site_path + '/Processed');
    var processed_subFolders = processed_folder.getFiles()

    // Loop through years
    for (var j = 0; j < processed_subFolders.length; j++) {

        // NOTE: 'var' at script top level makes this global; exportLayersToPNG
        // and saveLayer read 'year' through that global.
        var year = processed_subFolders[j].name.toString();

        var processed_folder = new Folder(site_path + '/Processed/' + year);
        var photoshop_folder = new Folder(site_path + '/Photoshop/' + year);
        var processed_images_all = processed_folder.getFiles("*.jpg");
        var photoshop_images = photoshop_folder.getFiles("*.jpg");

        // Ignore images in processed that have already been registered
        var images_to_register = imagesNotRegistered(processed_images_all, photoshop_images);

        // Register images in batches, to avoid reaching the image limit that causes photoshop to crash
        for (var k = 0; k < images_to_register.length; k += batch_size) {
            batch_register_images(k, site_path, site_name, images_to_register, target_image, seed_images);
        }
    }
}
|
||||||
|
|
||||||
|
|
||||||
|
// This is the main function, responsible for calling photoshop functions
|
||||||
|
// in a sequential order to register all_images
|
||||||
|
// Main driver for one batch: stacks the target, seed and batch images into a
// single Photoshop document, aligns and crops them to the target, saves the
// document, and exports the aligned batch images.
// Side effect: resets and refills the global batch_images_to_register, which
// exportLayersToPNG reads to decide which layers to save.
function batch_register_images(batchIndex, site_path, site_name, images_to_register, target_image, seed_images) {

    // Target first, then every seed image.
    var batch_images_all = [target_image[0]];
    for (var s = 0; s < seed_images.length; s++) {
        batch_images_all.push(seed_images[s]);
    }

    // Then up to batch_size images from this batch window.
    batch_images_to_register = [];
    var batchEnd = batchIndex + batch_size;
    for (var idx = batchIndex * 1; idx < batchEnd; idx++) {
        if (idx < images_to_register.length) {
            batch_images_all.push(images_to_register[idx]);
            batch_images_to_register.push(images_to_register[idx].name);
        }
    }

    // Photoshop pipeline: stack -> lock target -> select -> align -> crop.
    stackFiles(batch_images_all);
    lockTarget();
    selectAllLayers();
    autoAlign();

    var target_size = cropToTarget();
    var target_width = target_size[0];
    var target_height = target_size[1];

    savePhotoshopDocument(site_path, site_name);
    exportLayersToPNG(target_width, target_height, batch_images_to_register, seed_images); // Won't overwrite images that already exist

    app.activeDocument.close(SaveOptions.DONOTSAVECHANGES)
}
|
||||||
|
|
||||||
|
|
||||||
|
// Returns the subset of site_images_all that has no matching file in
// photoshop_images. A processed image counts as registered when the
// photoshop folder contains either the same filename or that filename
// tagged with '_CHECK' (e.g. "foo.jpg" -> "foo_CHECK.jpg").
//
// site_images_all  - File-like objects with a .name property (processed images)
// photoshop_images - File-like objects with a .name property (already exported)
// Returns an array of elements of site_images_all still needing registration.
function imagesNotRegistered(site_images_all, photoshop_images) {

    // Build a lookup of registered names once, so the scan below is
    // O(n + m) instead of the previous O(n * m) nested loop.
    var registered_names = {};
    for (var j = 0; j < photoshop_images.length; j++) {
        registered_names[photoshop_images[j].name.toString()] = true;
    }

    var site_images = [];
    for (var i = 0; i < site_images_all.length; i++) {
        var name = site_images_all[i].name.toString();
        // '_CHECK' copies are flagged exports produced by exportLayersToPNG.
        // (Previously this was recomputed on every inner-loop iteration.)
        var name_check = name.slice(0, -4) + '_CHECK.jpg';
        // Compare against 'true' to dodge Object.prototype properties.
        if (registered_names[name] !== true && registered_names[name_check] !== true) {
            site_images.push(site_images_all[i]);
        }
    }
    return site_images;
}
|
||||||
|
|
||||||
|
// Loads the given files as layers of a single new Photoshop document, by
// driving the stock "Load Files into Stack" script that ships with Photoshop.
function stackFiles(sFiles){

    // Read by the evaluated 'Load Files into Stack.jsx' script: tells it that
    // it is being driven from another script. Do NOT rename or remove.
    var loadLayersFromScript = true;

    // Locate Photoshop's Presets/Scripts folder (handles localized installs).
    var SCRIPTS_FOLDER = decodeURI(app.path + '/' + localize('$$$/ScriptingSupport/InstalledScripts=Presets/Scripts'));

    // Evaluating the script defines the global 'loadLayers' object used below.
    $.evalFile( new File(SCRIPTS_FOLDER + '/Load Files into Stack.jsx'));

    // false: do not attempt auto-alignment here (done separately by autoAlign).
    loadLayers.intoStack(sFiles, false);

};
|
||||||
|
|
||||||
|
// Selects every layer in the active document via the ActionManager API
// (scripting equivalent of Select > All Layers).
function selectAllLayers() {
    var layerRef = new ActionReference();
    layerRef.putEnumerated(charIDToTypeID('Lyr '), charIDToTypeID('Ordn'), charIDToTypeID('Trgt'));

    var descriptor = new ActionDescriptor();
    descriptor.putReference(charIDToTypeID('null'), layerRef);

    executeAction(stringIDToTypeID('selectAllLayers'), descriptor, DialogModes.NO);
};
|
||||||
|
|
||||||
|
// Runs Edit > Auto-Align Layers on the currently selected layers, aligning
// by content with an automatically chosen projection; vignette removal and
// radial-distortion correction are disabled.
function autoAlign() {

    var desc = new ActionDescriptor();

    var ref = new ActionReference();

    // Target the currently selected ('Trgt') layers.
    ref.putEnumerated( charIDToTypeID('Lyr '), charIDToTypeID('Ordn'), charIDToTypeID('Trgt') );

    desc.putReference( charIDToTypeID('null'), ref );

    // Align using content analysis.
    desc.putEnumerated( charIDToTypeID('Usng'), charIDToTypeID('ADSt'), stringIDToTypeID('ADSContent') );

    // Let Photoshop pick the projection automatically.
    desc.putEnumerated( charIDToTypeID('Aply'), stringIDToTypeID('projection'), charIDToTypeID('Auto') );

    desc.putBoolean( stringIDToTypeID('vignette'), false );

    desc.putBoolean( stringIDToTypeID('radialDistort'), false );

    executeAction( charIDToTypeID('Algn'), desc, DialogModes.NO );

};
|
||||||
|
|
||||||
|
// Locks the "Target.jpg" layer so that auto-align cannot move it: every
// other layer is aligned *to* the target.
function lockTarget() {
    var layerRef = app.activeDocument.artLayers.getByName("Target.jpg");
    layerRef.allLocked = true;
    // Removed a leftover no-op statement that read
    // app.activeDocument.activeLayer.linkedLayers without using the result.
}
|
||||||
|
|
||||||
|
|
||||||
|
// CROP
|
||||||
|
// Crops the whole document to the bounds of the "Target.jpg" layer and
// returns the resulting [width, height] of the active layer.
function cropToTarget() {
    var targetLayer = app.activeDocument.artLayers.getByName("Target.jpg");
    app.activeDocument.crop(targetLayer.bounds)

    // bounds = [left, top, right, bottom]
    var bounds = app.activeDocument.activeLayer.bounds;
    var croppedWidth = bounds[2] - bounds[0];
    var croppedHeight = bounds[3] - bounds[1];
    return [croppedWidth, croppedHeight];
}
|
||||||
|
|
||||||
|
// TO BE IMPLEMENTED
|
||||||
|
// function imageSize(layerRef) {
|
||||||
|
|
||||||
|
// }
|
||||||
|
|
||||||
|
// SAVE PHOTOSHOP DOCUMENT
|
||||||
|
// Saves the active document as "<site_name>.psd" inside the site folder.
function savePhotoshopDocument(site_path, site_name) {
    var psdFile = new File(site_path + '/' + site_name + '.psd');
    app.activeDocument.saveAs(psdFile);
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// EXPORT LAYERS TO IMAGES
|
||||||
|
|
||||||
|
// $.evalFile(File(app.path + '/Presets/Scripts/Export Layers To Files.jsx'));
|
||||||
|
// Exports every layer named in batch_images_to_register as a PNG into
// <document folder>/Photoshop/<year>. A layer whose aligned size shrank
// below 70% of the target in either dimension is exported with a
// '_CHECK.jpg' suffix so it can be reviewed manually.
// NOTE(review): 'year' is not a parameter -- it is the global leaked by the
// site/year loop at the top of this script. Confirm before reusing this
// function outside that loop.
function exportLayersToPNG(target_width, target_height, batch_images_to_register, seed_images){
    if(!documents.length) return;
    var doc = activeDocument;
    var oldPath = activeDocument.path;

    // Ensure the output folder <site>/Photoshop/<year> exists.
    var outFolder = new Folder(oldPath + "/Photoshop/" + year);
    if (!outFolder.exists) {
        outFolder.create();
    }

    scanLayerSets(doc);

    // Walks the art layers of the given container and saves the ones that
    // belong to the current batch (target and seed layers are excluded
    // because they are never in batch_images_to_register).
    function scanLayerSets(el) {

        var layerToRegister = true;
        // find plain layers in current group that are site images only (not target or seed)
        for(var j=0; j<el.artLayers.length; j++) { // Loop through photoshop document layers (images)
            var name = el.artLayers[j].name;
            layerToRegister = true;

            for (var i = 0; i < batch_images_to_register.length; i++ ) { // Loop through batch_images_to_register
                if(batch_images_to_register[i].toString() == name.toString()) {

                    app.activeDocument.activeLayer = el.layers.getByName(name);
                    var theBounds = app.activeDocument.activeLayer.bounds;
                    var aligned_image_width = theBounds[2] - theBounds[0];
                    var aligned_image_height = theBounds[3] - theBounds[1];

                    var name_updated = name;
                    // COMPARE THE DIMENSIONS OF THE ALIGNED IMAGE WITH THE TARGET
                    // IF SIGNIFICANT DIFFERENCE (30%), TAG IMAGE WITH '_CHECK.jpg'
                    if((aligned_image_width/target_width) < 0.7 || (aligned_image_height/target_height) < 0.7) {
                        name_updated = name.slice(0,-4) + '_CHECK.jpg';
                    }

                    saveLayer(el.layers.getByName(name), name_updated, oldPath, false);
                }
            }
        }
    }

    // Duplicates the given layer into its own document, optionally merges
    // visible layers, exports the result as PNG, and closes the temporary
    // document without saving.
    function saveLayer(layer, lname, path, shouldMerge) {
        activeDocument.activeLayer = layer;
        dupLayers();
        if (shouldMerge === undefined || shouldMerge === true) {
            activeDocument.mergeVisibleLayers();
        }
        //activeDocument.trim(TrimType.TRANSPARENT,true,true,true,true);
        var saveFile = File(path +"/Photoshop/"+year+"/"+lname);
        SavePNG(saveFile);
        app.activeDocument.close(SaveOptions.DONOTSAVECHANGES);
    }

};
|
||||||
|
|
||||||
|
// Duplicates the currently active layer into a brand-new document named
// after the layer (ActionManager equivalent of Layer > Duplicate Layer...
// with a new document as destination).
function dupLayers() {
    var docRef = new ActionReference();
    docRef.putClass( charIDToTypeID('Dcmn') );

    var sourceRef = new ActionReference();
    sourceRef.putEnumerated( charIDToTypeID('Lyr '), charIDToTypeID('Ordn'), charIDToTypeID('Trgt') );

    var makeDesc = new ActionDescriptor();
    makeDesc.putReference( charIDToTypeID('null'), docRef );
    makeDesc.putString( charIDToTypeID('Nm '), activeDocument.activeLayer.name );
    makeDesc.putReference( charIDToTypeID('Usng'), sourceRef );

    executeAction( charIDToTypeID('Mk '), makeDesc, DialogModes.NO );
};
|
||||||
|
|
||||||
|
// Exports the active document to the given file as a 24-bit PNG via
// Save-For-Web (transparency on, no interlacing).
function SavePNG(saveFile){
    var options = new ExportOptionsSaveForWeb;
    options.format = SaveDocumentType.PNG
    options.PNG8 = false;       // 24-bit PNG rather than 8-bit
    options.transparency = true;
    options.interlaced = false;
    options.quality = 200;      // quality is not used for PNG exports
    activeDocument.exportDocument(new File(saveFile),ExportType.SAVEFORWEB,options);
}
|
@ -0,0 +1,3 @@
|
|||||||
|
rem Activate the 'coastsnap' conda environment (legacy activate.bat style).
call activate coastsnap
rem Run the Spotteron batch downloader inside that environment.
python "C:\Users\z5079346\OneDrive - UNSW\Projects\Coastsnap\coastsnap\spotteron_batch_download.py"
rem NOTE(review): activation uses "activate" but deactivation uses
rem "conda deactivate" -- confirm both forms work on the target machine.
call conda deactivate
|
@ -0,0 +1,285 @@
|
|||||||
|
"""
|
||||||
|
Test download
|
||||||
|
"""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
from os import path, makedirs
|
||||||
|
|
||||||
|
import attr
|
||||||
|
import pytz
|
||||||
|
import requests
|
||||||
|
#import typer
|
||||||
|
import pandas as pd
|
||||||
|
from loguru import logger
|
||||||
|
from timezonefinder import TimezoneFinder
|
||||||
|
from werkzeug.utils import secure_filename
|
||||||
|
|
||||||
|
#app = typer.Typer()
# Master spreadsheet of CoastSnap sites; code below reads the per-site
# root_id, site_name, limit and parent_directory columns from it.
coastsnap_sites = pd.read_csv("C:/Users/z5079346/OneDrive - UNSW/Projects/Coastsnap_test/CoastSnap_Sites.csv")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@attr.s()
class SpotteronImage:
    """
    Parses the dictionary from the Spotteron API into an object to make it easier to
    get the required parameters
    """

    raw_data = attr.ib()   # one "spot" dict exactly as returned by the Spotteron API
    site_name = attr.ib()  # included in output_filename; may be None/empty

    # Base URL for downloading spot images (name-mangled, class-private).
    __img_url = "https://files.spotteron.com/images/spots/"

    def exists(self, folder):
        """
        Check if image has already been downloaded under <folder>/<year>/.
        Returns True when the expected output file is already present.
        """
        folder = path.join(folder, str(self.dt.year))
        return Path(folder, self.output_filename).is_file()

    def save(self, folder):
        """
        Download this image to <folder>/<year>/<output_filename>, creating
        the year subfolder if needed.
        """
        # Concatenate year to parent dir, e.g.
        # ".../Images/alex/Processed" + "/2022"
        folder = path.join(folder, str(self.dt.year))

        # Create the year folder on first use.
        if not path.exists(folder):
            makedirs(folder)

        # e.g. ".../2022/1641158046.Mon.Jan.03_07_14_06.AEST.2022.alex.snap.Raymond_b.jpg"
        output_filepath = Path(folder, self.output_filename)

        logger.info(f"Downloading {output_filepath}")
        response = requests.get(self.url, stream=True)
        if response.status_code == 200:
            with open(output_filepath, "wb") as f:
                f.write(response.content)
        else:
            # Previously non-200 responses were silently ignored; log them so
            # missing images can be traced.
            logger.warning(
                f"Failed to download {self.url} (HTTP {response.status_code})"
            )

    @property
    def id(self):
        # Spotteron's unique id for this spot.
        return self.raw_data["id"]

    @property
    def lat(self):
        return self.raw_data["attributes"]["latitude"]

    @property
    def lon(self):
        return self.raw_data["attributes"]["longitude"]

    @property
    def tz(self):
        """
        Finds timezone based on lon/lat
        """
        tf = TimezoneFinder()
        return tf.timezone_at(lng=self.lon, lat=self.lat)

    @property
    def dt(self):
        """
        Parses 'spotted at' attributes and returns a timezone aware python datetime
        """
        spotted_at = self.raw_data["attributes"]["spotted_at"]
        spotted_dt = datetime.datetime.strptime(spotted_at, "%Y-%m-%d %H:%M:%S")
        spotted_dt_tz = pytz.timezone(self.tz).localize(spotted_dt)
        return spotted_dt_tz

    @property
    def timestamp(self):
        # POSIX timestamp of the (timezone-aware) spotted time.
        return datetime.datetime.timestamp(self.dt)

    @property
    def url(self):
        """
        URL to download the image
        """
        img_name = f"{self.raw_data['attributes']['image']}.jpg"
        return urljoin(self.__img_url, img_name)

    @property
    def author(self):
        author = self.raw_data["attributes"]["spotted_by_name"]

        # Sanitize author and remove spaces so it is safe in a filename.
        author = secure_filename(author)
        author = re.sub(r"\s+", "", author)
        return author

    @property
    def output_filename(self):
        """
        Define the name of the image depending on its properties. Optional site_name
        can be included.
        """
        if self.site_name:
            return (
                f"{int(self.timestamp)}."
                f'{self.dt.strftime("%a.%b.%d_%H_%M_%S.")}{self.dt.tzname()}.{self.dt.strftime("%Y")}.'
                f"{self.site_name}.snap.{self.author}.jpg"
            )
        else:
            print("Please provide a site-name. Otherwise file names won't follow the reccomended naming convention")
            return (
                f"{int(self.timestamp)}."
                f'{self.dt.strftime("%a.%b.%d_%H_%M_%S.%z.%Y")}.{self.author}.jpg'
            )
|
||||||
|
|
||||||
|
|
||||||
|
@attr.s
class Spotteron:
    """
    Refer to https://www.spotteron.com/docs/api/v2?topic_id=37&key=LDazWbK5n62lbNA4hRNHtLa6hkyqz6Tr
    for API documentation
    """

    api_url = "https://www.spotteron.com/api/v2/spots"

    def save_images(self, root_id, output_folder, site_name, limit, overwrite):
        """
        Page through the API feed for the given station (root_id), saving each
        image under output_folder. Stops when `limit` images have been
        downloaded, when an already-downloaded image is encountered (unless
        overwrite is True), or when the API returns no more images.
        """
        page = 1
        n_downloaded = 0
        while True:
            json_data = self.get_data(page=page, root_id=root_id)
            images = [
                SpotteronImage(raw_data=x, site_name=site_name)
                for x in json_data["data"]
            ]

            if not images:
                # BUGFIX: previously this only logged and then kept requesting
                # the same empty feed forever; stop paging instead.
                logger.info("No images returned. Check correct root_id is supplied")
                break

            for img in images:
                if img.exists(output_folder) and not overwrite:
                    logger.info("Existing images found. Stopping getting images")
                    break
                img.save(output_folder)  # output_folder is the site's parent dir
                n_downloaded += 1

                if n_downloaded >= limit:
                    logger.info(f"Downloaded limit of {limit} images. Stopping.")
                    break
            else:
                # for-loop completed without break: fetch the next page.
                page += 1
                continue

            # Break out of the nested loop if we broke on an image.
            break

        logger.info("Download completed")

    @classmethod
    def get_data(cls, page, root_id=None):
        """
        Gets the json data for a particular topic_id and root_id. Returns a dictionary
        containing data returned by api.
        """
        # Defined by Spotteron for coastsnap stations
        topic_id = 37

        payload = {
            "filter[topic_id]": topic_id,
            "limit": 5,  # images per page
            "page": page,
        }
        if root_id:
            payload["filter[root_id]"] = root_id

        r = requests.get(cls.api_url, params=payload)
        return r.json()
|
||||||
|
|
||||||
|
|
||||||
|
# @app.command()
|
||||||
|
# def from_spotteron(
|
||||||
|
# root_id: int = typer.Argument(..., help="Spotteron id of Coastsnap station."),
|
||||||
|
# output_folder: str = typer.Argument(..., help="Path to save images to."),
|
||||||
|
# site_name: str = typer.Option(None, help="Add site to filename."),
|
||||||
|
# limit: int = typer.Option(30, help="Max number of images to save."),
|
||||||
|
# overwrite: bool = typer.Option(False, help="Overwrite downloaded images?"),
|
||||||
|
# ):
|
||||||
|
"""
|
||||||
|
Downloads images from Spotteron API and saves to folder
|
||||||
|
|
||||||
|
"""
|
||||||
|
def from_spotteron(root_id, output_folder, site_name, limit, overwrite):
|
||||||
|
spot = Spotteron()
|
||||||
|
spot.save_images(root_id, output_folder, site_name, limit, overwrite)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# @app.command()
|
||||||
|
# def from_spotteron_batch(
|
||||||
|
# overwrite: bool = typer.Option(False, help="Overwrite downloaded images?"),
|
||||||
|
# ):
|
||||||
|
# """
|
||||||
|
# Downloads images from Spotteron API for all beaches specified in batch_download.csv
|
||||||
|
# """
|
||||||
|
|
||||||
|
# #all_beaches = pd.read_csv(r"C:\Users\z5079346\OneDrive - UNSW\Code\coastsnap\coastsnap\spotteron_batch_download\batch_download.csv")
|
||||||
|
|
||||||
|
# # Retrieve Parent Directory in batch_download.csv
|
||||||
|
# parent_directory = coastsnap_sites.parent_directory[0]
|
||||||
|
# print(parent_directory)
|
||||||
|
|
||||||
|
# for index, beach in coastsnap_sites.iterrows():
|
||||||
|
|
||||||
|
# # Concatentate the parent directory, site name and 'Processed'
|
||||||
|
# # to create the output site_path
|
||||||
|
# site_name = beach.site_name
|
||||||
|
# site_path = path.join(parent_directory, site_name, 'Processed')
|
||||||
|
|
||||||
|
# # Download the images for a given site
|
||||||
|
# logger.info(f"Downloading images for {beach.site_name}")
|
||||||
|
# from_spotteron(beach.root_id, site_path, site_name, limit = beach.limit, overwrite = overwrite)
|
||||||
|
|
||||||
|
# if __name__ == "__main__":
|
||||||
|
# app()
|
||||||
|
|
||||||
|
#overwrite: bool = typer.Option(False, help="Overwrite downloaded images?"),
# ):
"""
Downloads images from Spotteron API for all beaches specified in batch_download.csv
"""
# NOTE(review): the triple-quoted string above is a stray module-level
# expression (left over from the commented-out typer command); it has no effect.

#all_beaches = pd.read_csv(r"C:\Users\z5079346\OneDrive - UNSW\Code\coastsnap\coastsnap\spotteron_batch_download\batch_download.csv")

# Retrieve Parent Directory in batch_download.csv
parent_directory = coastsnap_sites.parent_directory[0]
print(parent_directory)

# Download images for every site listed in the sites spreadsheet.
# NOTE: this runs at import time, not behind a __main__ guard.
for index, beach in coastsnap_sites.iterrows():

    # Concatenate the parent directory, site name and 'Processed'
    # to create the output site_path
    site_name = beach.site_name
    site_path = path.join(parent_directory, site_name, 'Processed')

    # Download the images for a given site
    # NOTE(review): overwrite is hard-coded to False here -- confirm intended.
    logger.info(f"Downloading images for {beach.site_name}")
    from_spotteron(beach.root_id, site_path, site_name, limit = beach.limit, overwrite = False)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -0,0 +1,210 @@
|
|||||||
|
"""
|
||||||
|
test register cli
|
||||||
|
"""
|
||||||
|
import datetime
|
||||||
|
import shutil
|
||||||
|
from enum import Enum
|
||||||
|
from functools import cached_property
|
||||||
|
from itertools import product
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import attr
|
||||||
|
import cv2
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
import typer
|
||||||
|
from loguru import logger
|
||||||
|
from moviepy.editor import *
|
||||||
|
|
||||||
|
from utils import divide_chunks, datenum_to_datetime, nearest
|
||||||
|
|
||||||
|
from PIL import Image, ImageFont, ImageDraw
|
||||||
|
import os
|
||||||
|
from time import strptime
|
||||||
|
from dateutil import tz
|
||||||
|
|
||||||
|
import openpyxl
|
||||||
|
import scipy.io as sio
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
# Enables command-line interface
app = typer.Typer()

#-----------------------------------------------------------------------------#
# Update this file path for Image Tagging - Tide Data
# Example: parent_dir = '/Users/admin/OneDrive - UNSW/My files/CoastSnap/'
# NOTE(review): the example above ends with a path separator but the value
# below does not, while tagging() builds paths by plain string concatenation
# (parent_dir + "Database/..."). Confirm the trailing separator.

parent_dir = r"C:\Users\z5079346\OneDrive - UNSW\My files\CoastSnap"

#-----------------------------------------------------------------------------#
||||||
|
@app.command()
def tagging(
    folder: Path = typer.Argument(None, help="Folder with images"),
    photoshop: bool = typer.Option(True, help="Have these images been registered with photoshop?"),
    tide: bool = typer.Option(False, help="Do you want to add the tide to the image tag?")

):
    """
    Tags images based on file name.
    - Requires font file in coastsnap/fonts directory

    Writes tagged copies of every *.jpg in `folder` into a 'tagged'
    subfolder, stamping date/time/contributor (and optionally the tide
    level read from the site's .mat tide file) onto a white banner.
    """

    from PIL import Image, ImageFont, ImageDraw # Throws errors if this isn't here..

    # Get image paths
    img_paths = [x for x in Path(folder).glob("*.jpg")]
    img_names = [str(x) for x in img_paths]
    logger.info(f"Tagging {len(img_names)} images")

    # Initialise white text box (real values are computed from the first image below)
    rect_height = 1
    width = 1

    # Check whether the directory 'tagged' exists or not
    path_name = img_names[0]
    tagged_dir = str(Path(path_name).parent) + '/tagged'
    isExist = os.path.exists(tagged_dir)
    if not isExist:
        # Create a new directory because it does not exist
        os.makedirs(tagged_dir)

    # Tide Data
    if tide:
        # Retrieve the site name from the first image
        filename = Path(img_names[0]).name
        # NOTE(review): when photoshop=True the first 6 characters and the
        # ".jpg" extension are stripped -- assumes a fixed filename prefix on
        # registered images; confirm against the naming convention.
        if photoshop == True:
            fname = filename[6:-4]
        else:
            fname = filename
        filename_list = fname.split(".")
        site_name = filename_list[6]

        # Retrieve tide data for the given site
        # NOTE(review): parent_dir has no trailing separator, so
        # parent_dir + "Database/..." yields e.g. "...CoastSnapDatabase\..."
        # -- likely a broken path; consider os.path.join. Same below for
        # 'Tide Data/'.
        db = openpyxl.load_workbook(parent_dir + "Database/CoastSnapDB.xlsx")
        beach_data = db[site_name]
        tide_filename = beach_data["B24"].value
        mat_path = parent_dir + 'Tide Data/' + tide_filename
        print("Loading tide data... (this may take half a minute)")
        mat = sio.loadmat(mat_path)
        tide_dict = mat['tide']
        ts = tide_dict[0][0] # Format of tide/time Matlab data is gross
        tt = list(ts[0]) # ts[0] = tides, ts[1] = times, ts[2] =
        tide_times = [datenum_to_datetime(i) for i in tt]

    fontsize = 1
    for index, img in enumerate(sorted(img_names)):
        print("Image " + str(index+1))
        filename = str(Path(img).name)
        image = Image.open(img)
        draw = ImageDraw.Draw(image)

        # Retrieve tag information from file name
        if photoshop == True:
            fname = filename[6:-4]
        else:
            fname = filename

        # Filename fields are dot-separated, e.g.
        # "1641158046.Mon.Jan.03_07_14_06.AEST.2022.alex.snap.Raymond_b.jpg"
        filename_list = fname.split(".")
        posix_time = filename_list[0]
        date = filename_list[3].split("_")
        hour = date[1]
        minute = date[2]
        second = date[3]
        day = date[0]
        month = '{:02d}'.format(strptime(filename_list[2],'%b').tm_mon) # Ensure 2-digit format
        year = filename_list[5]
        timezone = filename_list[4]
        if 'snap' in filename_list:
            contributor = filename_list[8] # Mitch filename format
        else:
            contributor = filename_list[6] # Leaman filename format

        # Retrieve tide data
        if tide:
            # Account for daylight savings
            # ASSUMPTION: All .mat tide files are in AEST.
            if timezone == 'AEDT':
                hour = str(int(hour) - 1)
            date_string = year + '-' + month + '-' + day + ' ' + hour + ':' + minute + ':' + second
            img_datetime = datetime.strptime(date_string, "%Y-%m-%d %H:%M:%S") # Image date/time as a datetime object

            # Closest tide record to the image's capture time.
            tide_date = nearest(tide_times, img_datetime)
            mat_index = tide_times.index(tide_date) # Retrieve the index of the .mat tide/time
            mat_tide = round(ts[1][mat_index][0], 2) # Associated tide

            print('Image date/time: ' + date_string)
            print('Tide record: ' + str(tide_date))

        # Image tag
        if tide:
            txt = ('Date:' + year + '/' + month + '/' + day +
                ' Time:' + hour + ':' + minute +
                ' Contributor:' + contributor +
                ' Tide:' + str(mat_tide) + 'm AHD')
        else:
            txt = ('Date:' + year + '/' + month + '/' + day +
                ' Time:' + hour + ':' + minute +
                ' Contributor:' + contributor)

        # Set the fontsize, such that the tag covers 50% the width of the first image
        if index == 0:
            img_fraction = 0.6 # Change this to change the font size

            # White Text Box
            width, height = image.size
            rect_height = height/20
            rect_width = width/1.5
            font = ImageFont.truetype("fonts/Courier New Bold.ttf", fontsize)

            # NOTE(review): ImageFont.getsize was removed in Pillow >= 10;
            # switch to getbbox/getlength if Pillow is upgraded.
            while font.getsize(txt)[0] < img_fraction*image.size[0]:
                # iterate until the text size is just larger than the criteria
                fontsize += 1
                font = ImageFont.truetype("fonts/Courier New Bold.ttf", fontsize)
            font = ImageFont.truetype("fonts/Courier New Bold.ttf", fontsize)

        # Create white text box
        draw.rectangle((0, 0, width, rect_height), fill='white')

        # Tag image with text
        draw.text((20, rect_height/4),txt, font = font, fill=(0, 0, 0))

        # NOTE(review): when photoshop=True, fname already had ".jpg"
        # stripped above, so fname[:-4] removes 4 more characters from the
        # base name -- confirm this is the intended output name.
        new_name = fname[:-4] + '_registered.jpg'
        print(new_name + '\n')
        new_path = tagged_dir + "/" + new_name
        image.save(new_path)

    logger.info(f"Tagged Images Saved")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
app()
|
||||||
|
|
Loading…
Reference in New Issue