#Sep 2018 version of the NARCLIM suite of codes for OEH tech guide

Development1
tinoheimhuber 6 years ago
parent 0a8d32769e
commit 0cd9a32a19

@ -17,12 +17,13 @@
# 'wssmean' Surface wind speed standard_name: air_velocity units: m s-1
# 'sstmean' Sea surface temperature daily mean
Clim_Var <- 'rldsmean'
Clim_Var <- 'evspsblmean'
Datatype <- 'T_GCMS' #T_GCMS for GCM forcing, T_NNRP for reanalysis (only 1950-2009)
Biasboolean <- 'False' #use bias corrected data?
Location <- 'catchment' #location of coordinate
Directory <- 'C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdoc/Projects/Paper#1/Data/NARCLIM_Site_CSVs/CASESTUDY2/'
Filename <- 'CASESTDUY2_NARCLIM_Point_Sites.csv'
Filename <- 'CASESTUDY2_NARCLIM_Point_Sites.csv'
#Load CSV with location names and lat lon coordinates
Location.df <- data.frame(read.csv(paste(Directory, Filename, sep=""), header=T, fileEncoding="UTF-8-BOM"))
@ -34,7 +35,7 @@ text1 <- c(paste("Datatype='",Datatype,"'", sep=""),
paste("Bias_corrected='",Biasboolean,"'", sep=""), paste("ClimVarName='",Clim_Var,"'", sep=""))
Vector.for.command.line.txt <- c(Vector.for.command.line.txt, text1)
for (i in 1:(length(Location.df$Name))){
name<-as.character(Location.df$Name[i])
name <-as.character(Location.df$Name[i])
#name<-gsub('([[:punct:]])|\\s+','_',name)
# if(i<10){
# name<-paste('Catchment_0', as.character(i), sep="")
@ -46,6 +47,10 @@ for (i in 1:(length(Location.df$Name))){
if (Clim_Var == 'sstmean'){
latitude=round(as.numeric(Location.df$Ocean_Lat[i]),3)
longitude=round(as.numeric(Location.df$Ocean_Long[i]),3)
}
if (Location == 'catchment'){
latitude=round(as.numeric(Location.df$Catch_Lat[i]),3)
longitude=round(as.numeric(Location.df$Catch_Lon[i]),3)
}
text <- c(paste("latitude=",latitude,"", sep=""), paste("longitude=",longitude,"", sep=""),
paste("name='",name,"'", sep=""),
@ -56,7 +61,7 @@ P1_NARCliM_NC_to_CSV_CCRC_SS.py \\
Vector.for.command.line.txt <- c(Vector.for.command.line.txt, text)
if(i==10|i==20|i==31|i==length(Location.df$Name)){
Vector.for.command.line.txt <- c(Vector.for.command.line.txt, " ")
text.file.name <- paste(Directory ,'/',Clim_Var, "_", Datatype, "_", Biasboolean,substring(as.character(i), 1,1), "_", ".txt", sep="")
text.file.name <- paste(Directory ,'/',Clim_Var, "_", Datatype, "_", Biasboolean,substring(as.character(i), 1,1), "_",Location, ".txt", sep="")
#open and fill text file
fileConn <- file(text.file.name)
writeLines(Vector.for.command.line.txt, fileConn)
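For readers more comfortable with Python than R, a minimal sketch of what this block does is given below: read the same site CSV and write chunks of shell commands that call P1_NARCliM_NC_to_CSV_CCRC_SS.py. The column names (Name, Catch_Lat, Catch_Lon) follow the catchment branch above; the fixed chunk size of 10 is a simplification of the i==10|i==20|i==31 split, and all file names and values are illustrative only.

import pandas as pd

clim_var, datatype, bias = 'evspsblmean', 'T_GCMS', 'False'
sites = pd.read_csv('CASESTUDY2_NARCLIM_Point_Sites.csv', encoding='utf-8-sig')

header = ["module load python",
          "Datatype='%s'" % datatype,
          "Bias_corrected='%s'" % bias,
          "ClimVarName='%s'" % clim_var]
lines, chunk = list(header), 1
for i, row in sites.iterrows():
    lat = round(float(row['Catch_Lat']), 3)   # catchment coordinates, as in the R branch above
    lon = round(float(row['Catch_Lon']), 3)
    lines += ["latitude=%s" % lat, "longitude=%s" % lon,
              "name='%s'" % row['Name'],
              "python P1_NARCliM_NC_to_CSV_CCRC_SS.py "
              "--lat $latitude --lon $longitude --varName $ClimVarName "
              "--domain 'd01' --timestep 'DAY' --LocationName $name "
              "--Datatype $Datatype --BiasBool $Bias_corrected"]
    if (i + 1) % 10 == 0 or i == len(sites) - 1:   # start a new batch file every 10 sites
        with open('%s_%s_%s_%d.txt' % (clim_var, datatype, bias, chunk), 'w') as f:
            f.write('\n'.join(lines))
        lines, chunk = list(header), chunk + 1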

@ -0,0 +1,81 @@
#code for preparing a text file with BASH code for batch download of NARCLIM data for the HUNTER WQ modeling of
#future climate scenarios
#NARCLIM Variables
#evspsblmean water_evaporation flux (actual ET) long_name: Surface evaporation standard_name: water_evaporation_flux units: kg m-2 s-1
#tasmean mean near surface temperature
#pracc precipitation daily precipitation sum (sum of convective prcacc and stratiform prncacc precip)
Clim_Var <- 'sstmean'
Datatype <- 'T_NNRP' #T_GCMS for GCM forcing, T_NNRP for reanalysis (only 1950-2009)
Biasboolean <- 'False' #use bias corrected data? True or False python boolean
Directory <- 'C:/Users/z5025317/OneDrive - UNSW/Hunter_CC_Modeling/07_Modelling/01_Input/BCGeneration/catchments/'
Filename <- 'Catchment_Prev_Hunter_Model_Centroids_VH_WGS84_attribute_Table.csv'
Directory <- 'C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdoc/Projects/Paper#1/Data/NARCLIM_Site_CSVs/CASESTUDY_GAB/'
Filename <- 'CASESTUDY2_GAB_NARCLIM_Point_Sites_V4.csv'
#Load CSV with location names and lat lon coordinates
Location.df <- data.frame(read.csv(paste(Directory, Filename, sep=""), header=T))
for(type in c('min','max')){
#create empty vector for storing the command line text and open file
Vector.for.command.line.txt <- c()
Vector.for.command.line.txt <- c(Vector.for.command.line.txt, "module load python")
text1 <- c(paste("Datatype='",Datatype,"'", sep=""),
paste("Bias_corrected='",Biasboolean,"'", sep=""), paste("ClimVarName='",Clim_Var,"'", sep=""))
Vector.for.command.line.txt <- c(Vector.for.command.line.txt, text1)
for (i in 1:(length(Location.df[,1]))){
#for (i in c(1,5,10)){ if only subset of catchments is needed like for Met file.
#name<-as.character(Location.df$Name[i])
#name<-gsub('([[:punct:]])|\\s+','_',name)
if(type == 'min'){
#latlongs <- as.character(Location.df[,9])
name <- as.character(Location.df[,1][i])
latitude <- as.character(Location.df[,2][i])
longitude <- as.character(Location.df[,3][i])
}else if(type == 'max'){
name <- as.character(Location.df[,6][i])
latitude<-as.character(Location.df[,7][i])
longitude<-as.character(Location.df[,8][i])
}
# print(latlongs[i])
# names <- as.character(Location.df[,17])
# if(i<10){
# name<-paste('Plant_0', as.character(i), sep="")
# }else{
# name<-paste('Plant_', as.character(i), sep="")
# }
#print(name)
#print(names[i])
# latmin <- substr(latlongs[i], 5, 10)
# lonmin <- substr(latlongs[i], 18, 23)
# latmax <- substr(latlongs[i], 32, 37)
# lonmax <- substr(latlongs[i], 45, 50)
# latitude=latmax
# longitude=lonmax
text <- c(paste("latitude=",latitude,"", sep=""), paste("longitude=",longitude,"", sep=""),
paste("name='",name,"'", sep=""),
"python /srv/ccrc/data02/z5025317/Code_execution/\\
P1_NARCliM_NC_to_CSV_CCRC_SS.py \\
--lat $latitude --lon $longitude --varName $ClimVarName --domain 'd01' --timestep \\
'DAY' --LocationName $name --Datatype $Datatype --BiasBool $Bias_corrected")
Vector.for.command.line.txt <- c(Vector.for.command.line.txt, text)
if(i==10|i==20|i==30|i==40|i==45){
Vector.for.command.line.txt <- c(Vector.for.command.line.txt, " ")
text.file.name <- paste('C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdoc/Projects/Paper#1/Data/NARCLIM_Site_CSVs/CASESTUDY_GAB/DataDownloadCCRC/',Clim_Var, "_", Datatype, "_", Biasboolean,as.character(i), type, ".txt", sep="")
#open and fill text file
fileConn <- file(text.file.name)
writeLines(Vector.for.command.line.txt, fileConn)
close(fileConn)
Vector.for.command.line.txt <- c()
Vector.for.command.line.txt <- c(Vector.for.command.line.txt, "module load python")
text1 <- c(paste("Datatype='",Datatype,"'", sep=""),
paste("Bias_corrected='",Biasboolean,"'", sep=""), paste("ClimVarName='",Clim_Var,"'", sep=""))
Vector.for.command.line.txt <- c(Vector.for.command.line.txt, text1)
}
}
}

@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
#==========================================================#
#Last Updated - March 2018
#@author: z5025317 Valentin Heimhuber
@ -34,34 +34,38 @@ os.chdir('C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdo
#loop through case study estuaries
Estuaries = ['HUNTER', 'RICHMOND', 'NADGEE', 'SHOALHAVEN', 'GEORGES','CATHIE']
Estuaries = ['HUNTER']
Estuaries = ['NADGEE','HUNTER', 'RICHMOND']
for Est in Estuaries:
#==========================================================#
#set input parameters
#==========================================================#
Case_Study_Name = 'CASESTUDY2'
Casestudy2_csv_path = "Data/NARCLIM_Site_CSVs/CASESTUDY2/CASESTDUY2_NARCLIM_Point_Sites.csv"
Version = "V4"
OutVersion = "V4"
Casestudy2_csv_path = 'Data/NARCLIM_Site_CSVs/' + Case_Study_Name + '/' + Case_Study_Name + '_NARCLIM_Point_Sites.csv'
#Estuary = 'HUNTER' # 'Belongil'
Estuary = Est # 'Belongil'
print Estuary
Base_period_start = '1970-01-01' #Start of interval for base period of climate variability
Base_period_start = '1950-01-01' #Start of interval for base period of climate variability
Base_period_DELTA_start = '1990-01-01' #Start of interval used for calculating mean and SD for base period (Delta base period)
Base_period_end = '2009-01-01' #use last day that's not included in period as < is used for subsetting
Clim_var_type = 'pracc' #Name of climate variable in NARCLIM models '*' will create pdf for all variables in folder
Stats = 'days_h_30' #'maxdaily' #maximum takes the annual max Precipitation instead of the sum
Stats = 'days_h_20' #'maxdaily' #maximum takes the annual max Precipitation instead of the sum 'days_h_30'
PD_Datasource = 'Station' #Source for present day climate data (historic time series) can be either: 'Station' or 'SILO'
SILO_Clim_var = ['max_temp'] #select the SILO clim variable to be used for base period. - see silo.py for detailed descriptions
PD_Datasource = 'SILO' #Source for present day climate data (historic time series) can be either: 'Station' or 'SILO'
SILO_Clim_var = ['daily_rain'] #select the SILO clim variable to be used for base period. - see silo.py for detailed descriptions
force_sea_levels = 'no' #use fort denison sea levels to run the statistics
Location = 'Estuary' # pick location for extracting the SILO data; can be: Estuary, Catchment, or Ocean
Main_Plot = 'yes'
presentdaybar = False #include a bar for present day variability in the plots?
present_day_plot = 'no' #save a time series of present day
Allin1_delta_plot = 'yes'
Median_markers = 'no' #plot the median horizontal lines on top of the delta range in the allin1 plot?
Figure_headings = 'no'
Version = "V1"
Deltaxmin, Deltaymin = -20,40 #y limits for 0 baseline delta plots
Median_markers = 'yes' #plot the median horizontal lines on top of the delta range in the allin1 plot?
Figure_headings = 'yes'
ALPHA_figs = 0 #Set alpha of figure background (0 makes everything around the plot panel transparent)
font = {'family' : 'sans-serif',
'weight' : 'normal',
@ -72,7 +76,7 @@ for Est in Estuaries:
#==========================================================#
#set directory path for output files
output_directory = 'Output/' + Case_Study_Name + '/' + Estuary + '/' + '/Clim_Deviation_Plots/'
output_directory = 'Output/' + Case_Study_Name + '_' + OutVersion + '/' + Estuary + '/' + '/Clim_Deviation_Plots/'
#output_directory = 'J:/Project wrl2016032/NARCLIM_Raw_Data/Extracted'
if not os.path.exists(output_directory):
os.makedirs(output_directory)
@ -86,7 +90,7 @@ for Est in Estuaries:
#==========================================================#
#read CSV file
#==========================================================#
Clim_Var_CSVs = glob.glob('./Output/' + Case_Study_Name + '/' + Estuary + '/' + Estuary + '_' + Clim_var_type + '_' + Stats + '*')
Clim_Var_CSVs = glob.glob('./Output/' + Case_Study_Name + '_' + Version + '/' + Estuary + '/' + Estuary + '_' + Clim_var_type + '_' + Stats + '*')
clim_var_csv_path = Clim_Var_CSVs[0]
Filename = os.path.basename(os.path.normpath(clim_var_csv_path))
Clim_var_type = Filename.split('_', 2)[1]
@ -117,6 +121,17 @@ for Est in Estuaries:
if PD_Datasource == 'Station':
Present_day_df,minplotDelta, maxplotDelta = fct.import_present_day_climdata_csv(Estuary, Clim_var_type)
if force_sea_levels == 'yes':
Estuary_Folder = "C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdoc/Projects/Paper#1/Data/Ocean_Data/Tide_Levels"
Present_day_df = pd.read_csv(Estuary_Folder + '/Fort_Denison_Daily_1900-2011.csv' , parse_dates=True, index_col=0)
Present_day_df = pd.read_csv(Estuary_Folder + '/Fort_Denison_Daily_1900-2011.csv' , parse_dates=True, index_col=0)
Present_day_df = Present_day_df.interpolate()
Present_day_df = pd.DataFrame(Present_day_df.loc[(Present_day_df.index >= Base_period_start) & (Present_day_df.index <= Base_period_end)])
[minplotDelta, maxplotDelta]=[1, 1]
df = pd.read_csv(Estuary_Folder + '/Fort_Denison_Sub_Daily_1900-2011.csv', parse_dates=[['Date', 'Time']])
df.index = pd.to_datetime(df.Date, format = 'dd/mm/Y') + pd.to_timedelta(df.Hour, unit='h')
if PD_Datasource == 'SILO':
#read the CSV to extract the lat/lon and case study sites
with open(Casestudy2_csv_path, mode='r') as infile:
@ -162,7 +177,7 @@ for Est in Estuaries:
#subtract a constant from all values to convert from Kelvin to Celsius (temp)
if Clim_var_type == 'sstmean':
Present_day_df = Present_day_df.iloc[:,0:(len(Present_day_df)-1)]-273.15
#Present_day_df = Present_day_df.iloc[:,0:(len(Present_day_df)-1)]-273.15
Present_day_df.index = Present_day_df.index.to_period('D')
#==========================================================#
#create seasonal sums etc.
@ -172,7 +187,12 @@ for Est in Estuaries:
Present_day_df_annual = Present_day_df_annual.replace(0, np.nan)
Fdf_Seas_means = Present_day_df.resample('Q-NOV').max() #seasonal means
Fdf_Seas_means = Fdf_Seas_means.replace(0, np.nan)
if(Stats[:4] =='days'):
elif Stats == 'maxmonthly':
Present_day_df_annual = Present_day_df.resample('M').max().resample('A').mean()
Present_day_df_annual = Present_day_df_annual.replace(0, np.nan)
Fdf_Seas_means = Present_day_df.resample('M').max().resample('Q-NOV').mean() #seasonal means
Fdf_Seas_means = Fdf_Seas_means.replace(0, np.nan)
elif(Stats[:4] =='days'):
Threshold = int(Stats[-2:])
agg = ('>'+ str(Threshold) + '_count', lambda x: x.gt(Threshold).sum()),
Present_day_df_annual = Present_day_df.resample('A').agg(agg)
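A minimal pandas sketch of the days-above-threshold statistic used in this branch, on a synthetic daily series: the named-tuple .agg(agg) in the code yields a '>30_count' column, while a plain .apply is shown here for clarity. The threshold of 30 matches Stats = 'days_h_30'; the data are invented.

import pandas as pd
import numpy as np

# synthetic daily series standing in for Present_day_df (e.g. daily maximum temperature)
idx = pd.date_range('1990-01-01', '2009-12-31', freq='D')
daily = pd.Series(20 + 15 * np.random.rand(len(idx)), index=idx)

threshold = 30                                   # from Stats = 'days_h_30'
days_per_year = daily.resample('A').apply(lambda x: (x > threshold).sum())
days_per_season = daily.resample('Q-NOV').apply(lambda x: (x > threshold).sum())
print(days_per_year.head())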
@ -191,7 +211,30 @@ for Est in Estuaries:
Present_day_df_weekly = Present_day_df.resample('W').mean()
Fdf_Seas_means = Present_day_df.resample('Q-NOV').mean() #seasonal means
#==========================================================#
#==========================================================#
#calculate linear trend in present day series
#==========================================================#
if Stats == 'maxdaily' or Stats[:4] =='days' or Stats == 'maxmonthly':
coefficients, residuals, _, _, _ = np.polyfit(range(len(Present_day_df_annual.index)),Present_day_df_annual.values,1,full=True)
#mse = residuals[0]/(len(selected.index))
#nrmse = np.sqrt(mse)/(selected.max() - selected.min())
trend = coefficients[0]
print('Slope ' + str(coefficients[0]))
print(Base_period_start[:4]+ '-' + Base_period_end[:4] + ' trend was: ' + str(np.round(trend,4)) + ' units/year')
elif Stats == 'mean':
coefficients, residuals, _, _, _ = np.polyfit(range(len(Present_day_df.index)),Present_day_df.values,1,full=True)
#mse = residuals[0]/(len(selected.index))
#nrmse = np.sqrt(mse)/(selected.max() - selected.min())
if force_sea_levels == 'yes':
trend = coefficients[0]*12
else:
trend = coefficients[0]*365
print('Slope ' + str(coefficients[0]))
print(Base_period_start[:4]+ '-' + Base_period_end[:4] + ' trend was: ' + str(np.round(trend,5)) + ' units/year')
#==========================================================#
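A compact sketch of the trend step above: np.polyfit on an integer index returns a slope in units per time step, which is already units/year for the annual series and is scaled by 365 (daily data) or 12 (monthly sea levels) in the 'mean' branch. The series below is synthetic.

import numpy as np
import pandas as pd

# synthetic annual series standing in for Present_day_df_annual
annual = pd.Series(np.linspace(25.0, 30.0, 40),
                   index=pd.period_range('1970', periods=40, freq='A'))

coefficients = np.polyfit(range(len(annual.index)), annual.values, 1)
trend = coefficients[0]                 # already units/year for an annual series
print('trend: %.4f units/year' % trend)

# for a daily series the slope is units/day, hence the *365 applied in the 'mean' branch:
# trend = np.polyfit(range(len(daily)), daily.values, 1)[0] * 365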
#==========================================================#
#Loop through annual and seasons and create a deviation plot for each.
@ -224,7 +267,8 @@ for Est in Estuaries:
if temp == 'SON':
Mean_df = Fdf_Seas_means[Fdf_Seas_means.index.quarter==4]
Column_names = ['Spring_near', 'Spring_far']
Present_Day_ref_df = Mean_df
Present_Day_ref_df = Mean_df
#Subset to present day variability period
Present_Day_ref_df = pd.DataFrame(Present_Day_ref_df.loc[(Present_Day_ref_df.index >= Base_period_start) & (Present_Day_ref_df.index <= Base_period_end)])
#Subset to present day variability delta base period (this is the statistical baseline used for present day conditions)
@ -284,8 +328,12 @@ for Est in Estuaries:
Plot_in_df3['near future'] = df.iloc[1,0]
Plot_in_df3['far future'] = df.iloc[0,0]
Plot_in_df3 = Plot_in_df3.transpose()
plt.plot(Plot_in_df3['Med'], linestyle="", markersize=45,
marker="_", color='darkblue', label="Median")
# plt.plot(Plot_in_df3['Med'], linestyle="", markersize=45,
# marker="_", color='red', label="Median")
#Plot_in_df3['Med']
#Plot_in_df3['Med'].values
plt.plot(np.arange(2),Plot_in_df3['Med'].values[::-1], linestyle="", markersize=45,
marker="_", color='red', label="Median")
z = plt.axhline(float(Present_Day_Mean-2*Present_Day_SD), linestyle='-', color='black', alpha=.5)
z.set_zorder(-1)
z = plt.axhline(float(Present_Day_Mean+2*Present_Day_SD), linestyle='-', color='black', alpha=.5)
@ -296,10 +344,10 @@ for Est in Estuaries:
z.set_zorder(-1)
z = plt.axhline(float(Present_Day_Mean), linestyle='--', color='red', alpha=.5)
z.set_zorder(-1)
#plt.ylim(xmin, xmax)
plt.ylim(10, 140)
plt.ylim(xmin, xmax)
#plt.ylim(10, 140)
if Figure_headings == 'yes':
plt.title(Clim_var_type + ' ' + temp )
plt.title(Clim_var_type + ' ' + temp)
ax.grid(False)
for tick in ax.get_xticklabels():
tick.set_rotation(0)
@ -321,17 +369,20 @@ for Est in Estuaries:
z = plt.axhline(float(Present_Day_Mean), linestyle='--', color='red', alpha=.5)
z.set_zorder(-1)
#fig.patch.set_facecolor('deepskyblue')
plt.ylim(0, 40)
fig.tight_layout()
fig.patch.set_alpha(ALPHA_figs)
if Figure_headings == 'yes':
plt.title(Clim_var_type + ' ' + Stats + ' ' + temp +' present day')
#plt.ylim(xmin, xmax)
#plt.title(Clim_var_type)
#plt.title(Clim_var_type + ' ' + Stats + ' ' + Base_period_start[:4] + '-' + Base_period_end[:4] + ' trend:' + str(np.round(trend,4)) + '/year')
plt.title(Base_period_start[:4] + '-' + Base_period_end[:4] + ' trend:' + str(np.round(trend,4)) + '/year')
plt.ylim(xmin, xmax)
#plt.ylim(0, 40)
ax.grid(False)
#if temp == 'MAM':
i=i+4
else:
i=i+1
#create a data frame that contains all of the delta bars
Plot_in_df_tp.index = Column_names
Plot_in_df_tp['-2std'] = Plot_in_df_tp['-2std'] - Present_Day_Mean
@ -349,11 +400,12 @@ for Est in Estuaries:
else:
Plot_in_df_tp.plot.bar(stacked=True, color=uni_colors, edgecolor='none', legend=False, width=0.5,ax=ax)
if Median_markers == 'yes':
plt.plot(Plot_in_df_tp.index, Plot_in_df_tp['Median'], linestyle="", markersize=13,
marker="_", color='darkblue', label="Median")
plt.plot(np.arange(2),Plot_in_df_tp['Median'].values , linestyle="", markersize=45,
marker="_", color='red', label="Median")
z = plt.axhline(float(0), linestyle='--', color='red', alpha=.5)
z.set_zorder(-1)
plt.ylim(-1, 20)
plt.ylim(Deltaxmin, Deltaymin)
#plt.ylim(xmin, xmax)
if Figure_headings == 'yes':
plt.title(Clim_var_type + ' All Deltas ' + Stats)
#ax.grid(False)
@ -366,7 +418,7 @@ for Est in Estuaries:
#plt.show()
if Main_Plot == 'yes':
if presentdaybar == False:
out_file_name = Estuary + '_' + Clim_var_type + '_' + Stats + '_' + PD_Datasource + '_' + SILO_Clim_var[0] + '_' + Version + '_' + '_NPDB.png'
out_file_name = Estuary + '_' + Clim_var_type + '_' + Stats + '_' + PD_Datasource + '_' + SILO_Clim_var[0] + '_' + '_' + Base_period_start + '_' + Base_period_end + '_' + Version + '_' + ' trend_' + str(np.round(trend[0],4)) + '.png'
else:
out_file_name = Estuary + '_' + Clim_var_type + '_' + Stats + '_' + PD_Datasource + '_' + SILO_Clim_var[0] + '_' + Version + '_' + '2.png'
out_path = output_directory + '/' + out_file_name
@ -387,17 +439,21 @@ for Est in Estuaries:
else:
Plot_in_df_tp.plot.bar(stacked=True, color=uni_colors, edgecolor='none', legend=False, width=0.5,ax=ax)
if Median_markers == 'yes':
plt.plot(Plot_in_df_tp.index, Plot_in_df_tp['Median'], linestyle="", markersize=13,
marker="_", color='darkblue', label="Median")
# plt.bar(Plot_in_df_tp.index, Plot_in_df_tp['Median'], linestyle="", markersize=13,
# marker="_", color='red', label="Median")
plt.plot(np.arange(10), Plot_in_df_tp['Median'], linestyle="", markersize=13,
marker="_", color='red', label="Median")
z = plt.axhline(float(0), linestyle='--', color='red', alpha=.5)
z.set_zorder(-1)
plt.ylim(-5, 10)
plt.ylim(Deltaxmin, Deltaymin)
if Figure_headings == 'yes':
plt.title(Clim_var_type + ' All Deltas ' + Stats)
#ax.grid(False)
ax.xaxis.grid(False)
for tick in ax.get_xticklabels():
tick.set_rotation(90)
tick.set_rotation(50)
plt.setp(ax.xaxis.get_majorticklabels(), ha='right')
#ax.set_xticklabels(xticklabels, rotation = 45, ha="right")
fig.tight_layout()
fig.patch.set_alpha(ALPHA_figs)
plt.show()
@ -450,7 +506,8 @@ for Est in Estuaries:
z.set_zorder(-1)
#fig.patch.set_facecolor('deepskyblue')
fig.patch.set_alpha(0)
plt.ylim(0, 400)
#plt.ylim(0, 400)
plt.ylim(xmin, xmax)
#export plot to png
out_file_name = Estuary + '_' + Clim_var_type + '_' + Stats + '_' + Base_period_start + '_' + Base_period_end + '_' + Version + 'Present_Day_Period.png'
out_path = output_directory + '/' + out_file_name

@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 28 12:15:17 2018
@ -53,16 +53,16 @@ Estuary = 'HUNTER' # 'Belongil'
Base_period_start = '1970-01-01' #Start of interval for base period of climate variability
Base_period_DELTA_start = '1990-01-01' #Start of interval used for calculating mean and SD for base period (Delta base period)
Base_period_end = '2009-01-01' #use last day that's not included in period as < is used for subsetting
Clim_var_type = 'Acidity' #Name of climate variable in NARCLIM models '*' will create pdf for all variables in folder
[minplotDelta, maxplotDelta]=[0,1]
Clim_var_type = 'acidity' #Name of climate variable in NARCLIM models '*' will create pdf for all variables in folder
[minplotDelta, maxplotDelta]=[0.5,1]
#Source for present day climate data (historic time series) can be either: 'Station' or 'SILO'
Location = 'Estuary' # pick location for extracting the SILO data; can be: Estuary, Catchment, or Ocean
presentdaybar = False #include a bar for present day variability in the plots?
present_day_plot = 'yes' #save a time series of present day
Version = "V1"
Stats = 'mindaily' #'maxdaily' #maximum takes the annual max Precipitation instead of the sum
Version = "V3"
Stats = 'mean' #'maxdaily' #maximum takes the annual max Precipitation instead of the sum
ALPHA_figs = 1 #Set alpha of figure background (0 makes everything around the plot panel transparent)
#==========================================================#
@ -70,7 +70,7 @@ ALPHA_figs = 1 #Set alpha of figure background (0 makes everything around the pl
#==========================================================#
#set directory path for output files
output_directory = 'Output/' + Case_Study_Name + '/' + Estuary + '/' + '/Clim_Deviation_Plots/'
output_directory = 'Output/' + Case_Study_Name +'_'+ Version + '/' + Estuary + '/' + '/Clim_Deviation_Plots/'
#output_directory = 'J:/Project wrl2016032/NARCLIM_Raw_Data/Extracted'
if not os.path.exists(output_directory):
os.makedirs(output_directory)
@ -116,6 +116,9 @@ Present_day_df= Present_day_df['PH']
if Stats == 'mindaily':
Present_day_df_annual = Present_day_df.resample('A').min()
Present_day_df_annual = Present_day_df_annual.replace(0, np.nan)
if Stats == 'minmonthly':
Present_day_df_annual = Present_day_df.resample('M').min().resample('A').mean()
Present_day_df_annual = Present_day_df_annual.replace(0, np.nan)
if Stats == 'mean':
Present_day_df_annual = Present_day_df.resample('A').mean()
Present_day_df_annual = Present_day_df_annual.replace(0, np.nan)
@ -146,10 +149,10 @@ index=['-2std', '-1std', 'Med', '1std', '2std']
columns = ['near future', 'far future']
Plot_in_df2 = pd.DataFrame(index=index, columns =columns )
Plot_in_df2['near future'] = [float(Present_Day_Mean + Ensemble_Delta_df.near['10th']),np.NaN, float(Ensemble_Delta_df.near['Median']-Ensemble_Delta_df.near['10th']),
np.NaN, float(Ensemble_Delta_df.near['90th']-Ensemble_Delta_df.near['Median'])]
Plot_in_df2['far future'] = [float(Present_Day_Mean + Ensemble_Delta_df.far['10th']),np.NaN, float(Ensemble_Delta_df.far['Median']-Ensemble_Delta_df.far['10th']),
np.NaN, float(Ensemble_Delta_df.far['90th']-Ensemble_Delta_df.far['Median'])]
Plot_in_df2['near future'] = [float(Present_Day_Mean + Ensemble_Delta_df.near['90th']),np.NaN, float(Ensemble_Delta_df.near['Median']-Ensemble_Delta_df.near['90th']),
np.NaN, float(Ensemble_Delta_df.near['10th']-Ensemble_Delta_df.near['Median'])]
Plot_in_df2['far future'] = [float(Present_Day_Mean + Ensemble_Delta_df.far['90th']),np.NaN, float(Ensemble_Delta_df.far['Median']-Ensemble_Delta_df.far['90th']),
np.NaN, float(Ensemble_Delta_df.far['10th']-Ensemble_Delta_df.far['Median'])]
#transpose the data frame
Plot_in_df_tp = Plot_in_df2.transpose()
#do the individual plots
@ -172,7 +175,7 @@ Plot_in_df3['near future'] = df.iloc[1,0]
Plot_in_df3['far future'] = df.iloc[0,0]
Plot_in_df3 = Plot_in_df3.transpose()
plt.plot(Plot_in_df3['Med'], linestyle="", markersize=52,
marker="_", color='darkblue', label="Median")
marker="_", color='red', label="Median")
z = plt.axhline(float(Present_Day_Mean-2*Present_Day_SD), linestyle='-', color='black', alpha=.5)
z.set_zorder(-1)
z = plt.axhline(float(Present_Day_Mean+2*Present_Day_SD), linestyle='-', color='black', alpha=.5)
@ -212,7 +215,7 @@ plt.ylim(xmin, xmax)
ax.grid(False)
if presentdaybar == False:
out_file_name = Estuary + '_' + Clim_var_type + '_' + Stats + '_' + Base_period_start + '_' + Base_period_end + Version + '_' + '_NPDB.png'
out_file_name = Estuary + '_' + Clim_var_type + '_' + Stats + '_' + Base_period_start + '_' + Base_period_end + Version + '_' + '_NPDB2.png'
out_path = output_directory + '/' + out_file_name
fig.savefig(out_path)

@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
from netCDF4 import *
import numpy as np
from numpy import *
@ -152,6 +152,7 @@ if Bias_Correction_BOOL == 'True':
#time.sleep(10)
#set up the loop variables for interrogating the entire NARCLIM raw data
GCMs = ('CCCMA3.1','CSIRO-MK3.0','ECHAM5', 'MIROC3.2', 'NNRP')
#set up the loop variables for interrogating the entire NARCLIM raw data
#
#Define empty pandas data frames
Full_df = pd.DataFrame()

@ -7,6 +7,7 @@ Variables available from NARCLIM (output):
pracc precipitation daily precipitation sum (sum of convective prcacc and stratiform prncacc precip)
pr1Hmaxtstep maximum 1 hour interval rainfall in a one day period
'pr1Hmaxtstep' Max. 1-hour time-window moving averaged precipitation rate units: kg m-2 s-1 maximum 1-hour time-window moving averaged values from point values 60.0 second
'pr30maxtstep' Max. 30min time-window moving averaged precipitation rate units: kg m-2 s-1 maximum 1-hour time-window moving averaged values from point values 60.0 second
'wss1Hmaxtstep' Max. 1-hour time-window moving averaged surface wind speed units: m s-1 maximum 1-hour time-window moving averaged values from point values 60.0 second
'wssmax' Surface wind speed standard_name: air_velocity units: m s-1 height: 10 m
'wssmean' Surface wind speed standard_name: air_velocity units: m s-1

@ -25,6 +25,7 @@ matplotlib.style.use('ggplot')
os.chdir('C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdoc/Projects/Paper#1/Analysis/Code')
import climdata_fcts as fct
import silo as sil
ALPHA_figs = 0
font = {'family' : 'sans-serif',
'weight' : 'normal',
@ -39,10 +40,11 @@ os.chdir('C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdo
#==========================================================#
#set input parameters
Case_Study_Name = 'CASESTUDY2'
Version = 'V5' #V4 is latest for all model runs #V5 is just for the Hunter catchment climatology
Estuaries = ['HUNTER', 'RICHMOND', 'NADGEE', 'SHOALHAVEN', 'GEORGES','CATHIE']
Estuaries = ['HUNTER']
Estuaries = ['HUNTER'] #,'HUNTER','RICHMOND']
#Estuaries = ['SHOALHAVEN']
for Est in Estuaries:
#Estuary = 'HUNTER' # 'Belongil'
Estuary = Est # 'Belongil'
@ -56,16 +58,17 @@ for Est in Estuaries:
#set input preferences
#==========================================================#
plot_pdf = 'no'
plot_pngs = 'yes'
plot_pngs = 'no'
delta_csv = 'yes'
Stats = 'days_h_30' # 'maxdaily', 'mean', 'days_h_25'
Version = 'V1'
#==========================================================#
Stats = 'mean' # 'maxdaily', 'mean', 'days_h_25'
DoMonthly = True
Perc_Vs_Abs = 'percent' #'percent' vs. 'absolute' deltas for near and far future
Location = 'Catchment'
#==========================================================#
#==========================================================#
#set directory path for output files
output_directory = 'Output/' + Case_Study_Name + '/' + Estuary
output_directory = 'Output/' + Case_Study_Name + '_' + Version + '/' + Estuary
#output_directory = 'J:/Project wrl2016032/NARCLIM_Raw_Data/Extracted'
if not os.path.exists(output_directory):
os.makedirs(output_directory)
@ -74,7 +77,7 @@ for Est in Estuaries:
print('-------------------------------------------')
#set directory path for individual png files
png_output_directory = 'Output/' + Case_Study_Name + '/' + Estuary + '/NARCLIM_Key_Figs'
png_output_directory = 'Output/' + Case_Study_Name + '_' + Version + '/' + Estuary + '/NARCLIM_Key_Figs'
#output_directory = 'J:/Project wrl2016032/NARCLIM_Raw_Data/Extracted'
if not os.path.exists(png_output_directory):
os.makedirs(png_output_directory)
@ -82,13 +85,13 @@ for Est in Estuaries:
print("output directory folder didn't exist and was generated")
print('-------------------------------------------')
#==========================================================#
#==========================================================#
Estuary_Folder = glob.glob('./Data/NARCLIM_Site_CSVs/'+ Case_Study_Name + '/' + Estuary + '*' )
if Location == 'Catchment':
Estuary_Folder = glob.glob('./Data/NARCLIM_Site_CSVs/'+ Case_Study_Name + '/' + Estuary + '*catch' )
Clim_Var_CSVs = glob.glob(Estuary_Folder[0] + '/' + Clim_var_type + '*')
#==========================================================#
#==========================================================#
#==========================================================#
#read CSV files and start analysis
@ -101,7 +104,7 @@ for Est in Estuaries:
Full_df = pd.read_csv(clim_var_csv_path, index_col=0, parse_dates = True)
#pandas datestamp index to period (we don't need the 12 pm info in the index (daily periods are enough))
Full_df.index = Full_df.index.to_period('D')
Full_df = Full_df.drop(columns=['period'])
#Full_df = Full_df.drop(columns=['period'])
Ncols_df = len(Full_df)
#check data types of columns
#Full_df.dtypes
@ -119,7 +122,7 @@ for Est in Estuaries:
if Clim_var_type in ['pr1Hmaxtstep']:
Full_df = Full_df.iloc[:,0:(Ncols_df-1)]*60*60
if Clim_var_type in ['rsdsmean','rldsmean']:
Full_df = Full_df.iloc[:,0:(Ncols_df-1)]*60*60*24/1000000
Full_df = Full_df.iloc[:,0:(Ncols_df-1)]*60*60*24/1000000 #convert to unit used in SILO
Fdf_1900_2080 = Full_df
#==========================================================#
@ -133,18 +136,40 @@ for Est in Estuaries:
Fdf_1900_2080_annual = Fdf_1900_2080_annual.replace(0, np.nan)
Fdf_Seas_means = Fdf_1900_2080.resample('Q-NOV').max() #seasonal means
Fdf_Seas_means = Fdf_Seas_means.replace(0, np.nan)
Fdf_Monthly_means = Fdf_1900_2080.resample('M').max() #monthly maxima
Fdf_Monthly_means = Fdf_Monthly_means.replace(0, np.nan)
elif(Stats[:4] =='days'):
Threshold = int(Stats[-2:])
#agg = ('above_27_count', lambda x: x.gt(27).sum()), ('average', 'mean')
agg = ('>'+ str(Threshold) + '_count', lambda x: x.gt(Threshold).sum()),
Fdf_1900_2080_annual = Fdf_1900_2080.resample('A').agg(agg)
#Fdf_1900_2080_annual = Fdf_1900_2080_annual.replace(0, np.nan)
Fdf_Seas_means = Fdf_1900_2080.resample('Q-NOV').agg(agg) #seasonal means
#Fdf_Seas_means = Fdf_Seas_means.replace(0, np.nan)
Fdf_Monthly_means = Fdf_1900_2080.resample('M').agg(agg)
else:
Fdf_1900_2080_annual = Fdf_1900_2080.resample('A').sum()
Fdf_1900_2080_annual = Fdf_1900_2080_annual.replace(0, np.nan)
Fdf_Seas_means = Fdf_1900_2080.resample('Q-NOV').sum() #seasonal means
Fdf_Seas_means = Fdf_Seas_means.replace(0, np.nan)
Fdf_Monthly_means = Fdf_1900_2080.resample('M').sum() #monthly sums
Fdf_Monthly_means = Fdf_Monthly_means.replace(0, np.nan)
else:
if(Stats == 'maxdaily'):
Fdf_1900_2080_annual = Fdf_1900_2080.resample('A').max()
Fdf_1900_2080_annual = Fdf_1900_2080_annual.replace(0, np.nan)
Fdf_Seas_means = Fdf_1900_2080.resample('Q-NOV').max() #seasonal means
Fdf_Seas_means = Fdf_Seas_means.replace(0, np.nan)
if(Stats[:4] =='days'):
Fdf_Monthly_means = Fdf_1900_2080.resample('M').max() #monthly maxima
Fdf_Monthly_means = Fdf_Monthly_means.replace(0, np.nan)
elif(Stats == 'maxmonthly'):
Fdf_1900_2080_annual = Fdf_1900_2080.resample('M').max().resample('A').mean()
Fdf_1900_2080_annual = Fdf_1900_2080_annual.replace(0, np.nan)
Fdf_Seas_means = Fdf_1900_2080.resample('M').max().resample('Q-NOV').mean() #seasonal means
Fdf_Seas_means = Fdf_Seas_means.replace(0, np.nan)
Fdf_Monthly_means = Fdf_1900_2080.resample('M').max().resample('Q-NOV').mean() #seasonal means of monthly maxima
Fdf_Monthly_means = Fdf_Monthly_means.replace(0, np.nan)
elif(Stats[:4] =='days'):
Threshold = int(Stats[-2:])
#agg = ('above_27_count', lambda x: x.gt(27).sum()), ('average', 'mean')
agg = ('>'+ str(Threshold) + '_count', lambda x: x.gt(Threshold).sum()),
@ -152,9 +177,11 @@ for Est in Estuaries:
#Fdf_1900_2080_annual = Fdf_1900_2080_annual.replace(0, np.nan)
Fdf_Seas_means = Fdf_1900_2080.resample('Q-NOV').agg(agg) #seasonal means
#Fdf_Seas_means = Fdf_Seas_means.replace(0, np.nan)
Fdf_Monthly_means = Fdf_1900_2080.resample('M').agg(agg)
else:
Fdf_1900_2080_annual = Fdf_1900_2080.resample('A').mean()
Fdf_Seas_means = Fdf_1900_2080.resample('Q-NOV').mean() #seasonal means
Fdf_Monthly_means = Fdf_1900_2080.resample('M').mean() #monthly means
Fdf_1900_2080_means = Fdf_1900_2080.mean()
#==========================================================#
@ -179,14 +206,20 @@ for Est in Estuaries:
#==========================================================#
#Create Deltas of average change for annual and seasonal basis
#==========================================================#
delta_all_df = fct.calculate_deltas_NF_FF2(Fdf_1900_2080_annual, Fdf_Seas_means, Stats)
delta_all_df = fct.calculate_deltas_NF_FF2(Fdf_1900_2080_annual, Fdf_Seas_means, Stats, Perc_Vs_Abs)
if DoMonthly ==True:
delta_all_df_monthly = fct.calculate_deltas_monthly(Fdf_Monthly_means, Stats, Perc_Vs_Abs)
delta_all_df_monthly = pd.concat([delta_all_df_monthly.filter(regex= 'near'),delta_all_df_monthly.filter(regex= 'far')], axis=1)[-3:]
#==========================================================#
if delta_csv == 'yes':
out_file_name = Estuary + '_' + Clim_var_type + '_' + Stats + '_NARCliM_ensemble_changes.csv'
out_path = output_directory + '/' + out_file_name
delta_all_df.to_csv(out_path)
if delta_csv == 'yes' and DoMonthly ==True:
out_file_name = Estuary + '_' + Clim_var_type + '_' + Stats + '_NARCliM_ensemble_changes_monthly.csv'
out_path = output_directory + '/NARCLIM_Changes_Hunter/' + out_file_name
delta_all_df_monthly.to_csv(out_path)
#==========================================================#
#==========================================================#
@ -216,6 +249,9 @@ for Est in Estuaries:
'tomato', 'royalblue', 'mediumpurple' , 'tomato', 'royalblue', 'mediumpurple' , 'tomato', 'royalblue', 'mediumpurple' , 'tomato', 'royalblue', 'mediumpurple' ]
plotcolours12 = ['darkolivegreen','turquoise', 'lightgreen', 'darkgreen', 'lightpink','slateblue', 'slategray', 'orange', 'tomato', 'peru', 'navy', 'teal']
plotcolours15 = ['darkolivegreen','turquoise', 'lightgreen', 'darkgreen', 'lightpink','slateblue', 'slategray', 'orange', 'tomato', 'peru', 'navy', 'teal', 'lightgreen','lightpink','slateblue']
color_dict = {'CCCMA3.1_R1': 'darkolivegreen', 'CCCMA3.1_R2': 'turquoise', 'CCCMA3.1_R3': 'lightgreen', 'CSIRO-MK3.0_R2': 'darkgreen',
'CSIRO-MK3.0_R1':'lightpink', 'CSIRO-MK3.0_R3':'slateblue','ECHAM5_R1':'slategray', 'ECHAM5_R2': 'orange', 'ECHAM5_R3': 'tomato',
'MIROC3.2_R1': 'peru', 'MIROC3.2_R2': 'navy' ,'MIROC3.2_R3': 'teal', '10th':'lightgreen','median':'lightpink','90th':'slateblue'}
#==========================================================#
@ -284,27 +320,38 @@ for Est in Estuaries:
#=========#
# time series plot annual ALL models
#=========#
png_out_file_name = Clim_var_type + '_' + Stats + '_TimeSeries_AllMods_' + Version + '.png'
png_out_file_name = Clim_var_type + '_' + Stats + '_TimeSeries_AllMods_' + Version + '3.png'
png_out_path = png_output_directory + '/' + png_out_file_name
plt.title(Clim_var_type + ' - ' + Stats + ' - Time series - representative models')
#prepare data
Mod_order = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,19,20,21,16,17,18,22,23,24,31,32,33,25,26,27,28,29,30,34,35,36]
test = Fdf_1900_2080_annual
Mod_Names = test.columns
New_Mod_Name = []
for i in range(0,len(Mod_Names)):
if(Stats[:4] =='days'):
New_Mod_Name.append(str(Mod_order[i]+10) + '_' + Mod_Names[i][0])
else:
New_Mod_Name.append(str(Mod_order[i]+10) + '_' + Mod_Names[i])
test.columns = New_Mod_Name
test_sorted = test.reindex_axis(sorted(test.columns), axis=1)
colnamest = test.columns
test_sorted.columns = [w[3:-5] for w in colnamest]
# Mod_order = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,19,20,21,16,17,18,22,23,24,31,32,33,25,26,27,28,29,30,34,35,36]
# test = Fdf_1900_2080_annual
# Mod_Names = test.columns
# New_Mod_Name = []
# for i in range(0,len(Mod_Names)):
# if(Stats[:4] =='days'):
# New_Mod_Name.append(str(Mod_order[i]+10) + '_' + Mod_Names[i][0])
# else:
# New_Mod_Name.append(str(Mod_order[i]+10) + '_' + Mod_Names[i])
# test.columns = New_Mod_Name
# test_sorted = test.reindex_axis(sorted(test.columns), axis=1)
# colnamest = test.columns
# test_sorted.columns = [w[3:-5] for w in colnamest]
#plot
fig = plt.figure(figsize=(8,7))
ax=plt.subplot(2,1,1)
test_sorted.plot(ax=ax,color = plotcolours36)
#test_sorted.plot(ax=ax) #,color = plotcolours36)
Fdf_1900_2080_annual_b = Fdf_1900_2080_annual
New_Mod_Name = []
if(Stats[:4] =='days'):
for each in Fdf_1900_2080_annual_b.columns:
New_Mod_Name.append(each[0][:-5])
else:
for each in Fdf_1900_2080_annual_b.columns:
New_Mod_Name.append(each[:-5])
Fdf_1900_2080_annual_b.columns = New_Mod_Name
Fdf_1900_2080_annual_b.plot(ax=ax, color=[color_dict[x] for x in Fdf_1900_2080_annual_b.columns])
plt.legend(loc=9, bbox_to_anchor=(0.5, -0.2))
ax.xaxis.grid(False)
fig.patch.set_alpha(ALPHA_figs)

@ -71,9 +71,8 @@ def select_min_med_max_dif_model(NARCLIM_df):
return dfall , dfmin, dfmed, dfmax, Min_dif_mod_name,Med_dif_mod_name, Max_dif_mod_name
def calculate_deltas_NF_FF2(Annual_df, Seasonal_df, Stats):
def calculate_deltas_NF_FF2(Annual_df, Seasonal_df, Stats, Perc_vs_Abs):
"""calculates the "deltas" between nearfuture and present day for annual or seasonal climate data in pandas TS format"""
times = ['annual', 'DJF', 'MAM', 'JJA','SON']
delta_all_df = pd.DataFrame()
for temp in times:
@ -106,10 +105,63 @@ def calculate_deltas_NF_FF2(Annual_df, Seasonal_df, Stats):
for unique_model in unique_models:
dfdiff = Mean_df.filter(regex= unique_model)
type(dfdiff)
delta_NF = dfdiff[1] - dfdiff[0]
delta_NF_ensemble.append(delta_NF)
delta_FF = dfdiff[2] - dfdiff[1]
delta_FF_ensemble.append(delta_FF)
if Perc_vs_Abs == 'absolute':
delta_NF = dfdiff[1] - dfdiff[0]
delta_NF_ensemble.append(delta_NF)
delta_FF = dfdiff[2] - dfdiff[0]
delta_FF_ensemble.append(delta_FF)
if Perc_vs_Abs == 'percent':
delta_NF = ((dfdiff[1] - dfdiff[0])/dfdiff[0])*100
delta_NF_ensemble.append(delta_NF)
delta_FF = ((dfdiff[2] - dfdiff[0])/dfdiff[0])*100
delta_FF_ensemble.append(delta_FF)
delta_df1=pd.DataFrame(delta_NF_ensemble, index=unique_models)
delta_df2=pd.DataFrame(delta_FF_ensemble, index=unique_models)
delta_df= pd.concat([delta_df1, delta_df2], axis=1)
#rename columns
delta_df.columns = Column_names
#add a row with medians and 10 and 90th percentiles
delta_df.loc['10th'] = pd.Series({Column_names[0]:np.percentile(delta_df[Column_names[0]], 10), Column_names[1]:np.percentile(delta_df[Column_names[1]], 10)})
delta_df.loc['median'] = pd.Series({Column_names[0]:np.percentile(delta_df[Column_names[0]], 50), Column_names[1]:np.percentile(delta_df[Column_names[1]], 50)})
delta_df.loc['90th'] = pd.Series({Column_names[0]:np.percentile(delta_df[Column_names[0]], 90), Column_names[1]:np.percentile(delta_df[Column_names[1]], 90)})
#append df to overall df
delta_all_df = pd.concat([delta_all_df, delta_df], axis=1)
if(Stats[:4] =='days'):
delta_all_df = delta_all_df .astype(int).fillna(0.0)
return delta_all_df
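A worked sketch of the percent/absolute delta logic added above, assuming the three period means per model are ordered present / near future / far future as in dfdiff[0..2]; the model names are real NARCLIM ensemble members but the values are hypothetical, for illustration only.

import numpy as np

# hypothetical per-model period means: [present, near future, far future]
model_means = {'CCCMA3.1_R1': [800.0, 840.0, 900.0],
               'ECHAM5_R2':   [800.0, 780.0, 760.0],
               'MIROC3.2_R3': [800.0, 820.0, 880.0]}

near, far = [], []
for present, nf, ff in model_means.values():
    near.append((nf - present) / present * 100.0)   # 'percent' branch
    far.append((ff - present) / present * 100.0)    # 'absolute' branch would use nf - present, ff - present

print('near-future 10th/median/90th:',
      [round(np.percentile(near, q), 1) for q in (10, 50, 90)])
print('far-future  10th/median/90th:',
      [round(np.percentile(far, q), 1) for q in (10, 50, 90)])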
def calculate_deltas_monthly(Monthly_df, Stats, Perc_vs_Abs):
"""calculates the "deltas" between nearfuture and present day for annual or seasonal climate data in pandas TS format"""
delta_all_df = pd.DataFrame()
for i in range(1, 13, 1):
Mean_df = Monthly_df[Monthly_df.index.month==i].mean()
Column_names = [str(i)+'_near', str(i)+'_far']
if(Stats[:4] =='days'):
models = list(Monthly_df.mean().index.get_level_values(0))
else:
models = list(Monthly_df.mean().index)
newmodel = []
for each in models:
newmodel.append(each[:-5])
unique_models = set(newmodel)
# calculate diff for each unique model
delta_NF_ensemble = []
delta_FF_ensemble = []
for unique_model in unique_models:
dfdiff = Mean_df.filter(regex= unique_model)
type(dfdiff)
if Perc_vs_Abs == 'absolute':
delta_NF = dfdiff[1] - dfdiff[0]
delta_NF_ensemble.append(delta_NF)
delta_FF = dfdiff[2] - dfdiff[0]
delta_FF_ensemble.append(delta_FF)
if Perc_vs_Abs == 'percent':
delta_NF = ((dfdiff[1] - dfdiff[0])/dfdiff[0])*100
delta_NF_ensemble.append(delta_NF)
delta_FF = ((dfdiff[2] - dfdiff[0])/dfdiff[0])*100
delta_FF_ensemble.append(delta_FF)
delta_df1=pd.DataFrame(delta_NF_ensemble, index=unique_models)
delta_df2=pd.DataFrame(delta_FF_ensemble, index=unique_models)
@ -126,7 +178,6 @@ def calculate_deltas_NF_FF2(Annual_df, Seasonal_df, Stats):
if(Stats[:4] =='days'):
delta_all_df = delta_all_df .astype(int).fillna(0.0)
return delta_all_df
def import_present_day_climdata_csv(Estuary, Clim_var_type):
"""

Binary file not shown.

@ -19,30 +19,39 @@ setwd("C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdoc/P
#Set inputs
######################
Case.Study <- "CASESTUDY2"
Estuary <- "RICHMOND"
Riv.Gauge.loc <- "OAKLANDROAD" #GRETA
Est.Gauge.loc <- "CORAKI" #"RAYMONDTERRACE" # "HEXHAMBRIDGE"
Estuary <- "HUNTER"
Riv.Gauge.loc <- "GRETA" #GRETA
Est.Gauge.loc <- "RAYMONDTERRACE" #"CORAKI" #"RAYMONDTERRACE" # "HEXHAMBRIDGE"
logtransformFlow <- TRUE
ggplotGAM.k <- 15
rivTempGAM.k <- 20
Version <- 'V2'
Version <- 'V3'
Fontsize <- 12
######################
######################
Output.Directory <- paste('./Output/', Case.Study, '/', Estuary,'/Recent_Trends/Riv_', Riv.Gauge.loc,'_Est_',Est.Gauge.loc,'_GAMk', ggplotGAM.k, '/', sep="")
Output.Directory <- paste('./Output/CASESTUDY2_V4/', Estuary,'/Recent_Trends/', sep="")
if (file.exists(Output.Directory)){
print('output folder already existed and was not created again')
} else {
dir.create(file.path(Output.Directory))
print('output folder did not exist and was created')
}
Output.Directory <- paste('./Output/CASESTUDY2_V4/', Estuary,'/Recent_Trends/Riv_', Riv.Gauge.loc,'_Est_',Est.Gauge.loc,'_GAMk', ggplotGAM.k, 'b/', sep="")
if (file.exists(Output.Directory)){
print('output folder already existed and was not created again')
} else {
dir.create(file.path(Output.Directory))
print('output folder did not exist and was created')
}
######################
######################
#Set input file paths
######################
pattern = paste('SILO_climdata_', Estuary,'*', sep="")
pattern = paste('SILO_climdata_', Estuary,'_Catchment*', sep="")
AirT_CSV_Path <- list.files(paste("./Data/SILO/",Case.Study, '/',sep=""), pattern, full.names=T, recursive=T)
pattern = paste(Estuary,'@', Riv.Gauge.loc, '.*.ALL.csv', sep="")
@ -114,6 +123,12 @@ RivT.full.pvalNCV_ECall <- summary(linear.trend.model_EC_all )$coefficients[2,4]
RivT.full.lintrend <- summary(linear.trend.model_EC_all )$coefficients[2,1] * 356
############River temp
#export interpolated data as csv
#csv.name <- "C:/Users/z5025317/OneDrive - UNSW/CC_Estuaries_CASESTUDY2/Data/River_Gauge_Data/HUNTER@Greta_210064_Temp_interpolated.csv"
#write.csv(RivT.df, csv.name)
############River flow
#Load a daily (no gaps) time series as a time serie baseline for other time series used here
#Here we use the raw DPI CSV format that comes with a bunch of metadata
@ -128,7 +143,8 @@ rm(dat,char.df)
#RivQ.full.TS <- zoo(log10((as.numeric(as.character(RivQ.df$Flow))) +1) , order.by= as.Date(RivQ.df[,"Date"], format = "%d/%m/%Y")) #=daily time series of rainfall for creation of clean, daily TS of ET and Q
RivQ.full.TS <- zoo(as.numeric(as.character(RivQ.df$Flow)), order.by= as.Date(RivQ.df[,"Date"], format = "%d/%m/%Y")) #=daily time series of rainfall for creation of clean, daily TS of ET and Q
RivQ.TS <- window(RivQ.full.TS, start=as.Date("1990-01-01"), end=as.Date("2018-01-01"))
#RivQ.TS <- window(RivQ.full.TS, start=as.Date("1990-01-01"), end=as.Date("2018-01-01"))
RivQ.full.TS <- window(RivQ.full.TS, start=as.Date("2013-06-01"), end=as.Date("2018-06-01"))
RivQ.full.df <- data.frame(RivQ.full.TS) ### This is only done because
RivQ.df <- data.frame(RivQ.TS)
colnames(RivQ.df) <- 'RivQmean'
@ -149,7 +165,10 @@ RivQ.full.lintrend <- summary(linear.trend.model_EC_all )$coefficients[2,1] * 35
#Load a daily (no gaps) time series as a time serie baseline for other time series used here
SST.df <- data.frame(read.csv(SST_CSV_Path))
SST.full.TS <- zoo(SST.df$NNRP_R1_1950-273.15, order.by= as.Date(SST.df[,"X"], format = "%Y-%m-%d")) #=daily time series of rainfall for creation of clean, daily TS of ET and Q
SST.TS <- window(SST.full.TS, start=as.Date("1990-01-01"), end=as.Date("2018-01-01"))
#SST.TS <- window(SST.full.TS, start=as.Date("1990-01-01"), end=as.Date("2018-01-01"))
SST.full.TS <- window(SST.full.TS, start=as.Date("2013-06-01"), end=as.Date("2018-06-01"))
SST.full.df <- data.frame(SST.full.TS)
SST.df <- data.frame(SST.TS)
str(SST.df)
@ -185,8 +204,7 @@ rm(dat,char.df)
#replace negative values with NA
EstT.df$Temp <- replace(EstT.df$Temp, which(as.numeric(as.character(EstT.df$Temp)) < 11), NA)
EstT.full.TS <- zoo(as.numeric(as.character(EstT.df$Temp)), order.by= as.Date(EstT.df[,"Date"], format = "%d/%m/%Y")) #=daily time series of rainfall for creation of clean, daily TS of ET and Q
plot(EstT.full.TS)
EstT.TS <- window(EstT.full.TS, start=as.Date("2013-06-01"), end=as.Date("2018-01-01"))
EstT.TS <- window(EstT.full.TS, start=as.Date("2013-06-01"), end=as.Date("2018-06-01"))
EstT.full.TS <- EstT.TS
EstT.full.df <- data.frame(EstT.TS) ### This is only done because of poor data at beginning
EstT.df <- data.frame(EstT.TS)
@ -211,7 +229,7 @@ EstT.full.lintrend <- summary(linear.trend.model_EC_all )$coefficients[2,1] * 35
##################################### Full Time Period
#Air temp Full period
p1air <- ggplot(AirT.full.df, aes(y=tasmean, x=index(AirT.full.TS))) + geom_line(alpha=0.5) +
ggtitle(paste(Estuary," Catchment Centroid - Linear and smooth trend in catchment airT (SILO) lin trend was ",
ggtitle(paste(Estuary," Catchment Centroid - Linear and smooth trends in catchment airT (SILO) lin trend was ",
round(AirT.full.lintrend,3), ' C°/year with p=', round(AirT.full.pvalNCV_ECall,10), sep=" ")) +
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
@ -219,12 +237,14 @@ p1air <- ggplot(AirT.full.df, aes(y=tasmean, x=index(AirT.full.TS))) + geom_line
ylab("Air Temperature [C°]") + xlab("Time")
p1riv <- ggplot(RivT.full.df, aes(y=rivTmean, x=index(RivT.TS))) + geom_line(alpha=0.5) +
ggtitle(paste(Estuary,'@', Riv.Gauge.loc, " - Linear and smooth trend in river temperature (GAUGE) lin trend was ",
round(RivT.full.lintrend,3), ' C°/year with p=', round(RivT.full.pvalNCV_ECall,10), sep=" ")) +
ggtitle(paste(Estuary, " - Linear and smooth trends in gauged river temperature (@", Riv.Gauge.loc ,") - Linear trend was ",
round(RivT.full.lintrend,3), 'C°/year with p=', sprintf("%.5f",round(RivT.full.pvalNCV_ECall,10)), sep="")) +
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab("River Temperature [C°]") + xlab("Time")
ylab("River Temperature [C°]") + xlab("Time") + xlab(NULL) +
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
if(logtransformFlow ==TRUE){
@ -234,7 +254,9 @@ p1rivQ <- ggplot(RivQ.full.df, aes(y=log10(RivQmean+2), x=index(RivQ.full.TS)))
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab(expression(paste("log10(River Flow [ML/day] + 2)", sep=""))) + xlab("Time")
ylab(expression(paste("log10(River Flow [ML/day] + 2)", sep=""))) + xlab("Time") +
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
} else {
p1rivQ <- ggplot(RivQ.full.df, aes(y=RivQmean, x=index(RivQ.full.TS))) + geom_line(alpha=0.5) +
ggtitle(paste(Estuary,'@', Riv.Gauge.loc, " - Linear and smooth trend in river flow (GAUGE) lin trend was ",
@ -242,28 +264,54 @@ p1rivQ <- ggplot(RivQ.full.df, aes(y=log10(RivQmean+2), x=index(RivQ.full.TS)))
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab(expression(paste("River Flow [ML/day]", sep=""))) + xlab("Time")
ylab(expression(paste("River Flow [ML/day]", sep=""))) + xlab("Time") +
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
}
#Sea temp Full period
p1sst <- ggplot(SST.full.df, aes(y=SSTmean, x=index(SST.full.TS))) + geom_line(alpha=0.5) +
ggtitle(paste(Estuary, "NNRP (NARCLIM reanalysis) - Linear and smooth trend in sea surface temperature (NNRP) lin trend was ",
round(SST.full.lintrend,3), ' C°/year with p=', round(SST.full.pvalNCV_ECall,10), sep=" ")) +
ggtitle(paste(Estuary, " offshore - Linear and smooth trends in sea surface temperature (NNRP NARCLIM reanalysis) linear trend was ",
round(SST.full.lintrend,3), 'C°/year with p=', sprintf("%.5f",round(SST.full.pvalNCV_ECall,10)), sep=" ")) +
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab("Sea Surface Temperature [C°]") + xlab("Time")
ylab("Sea Surface Temperature [C°]") + xlab("Time")+ xlab(NULL) +
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
p1Est <- ggplot(EstT.full.df, aes(y=EstTmean, x=index(EstT.TS))) + geom_line(alpha=0.5) +
ggtitle(paste(Estuary,'@', Est.Gauge.loc, " - Linear and smooth trend in Estuary temperature (GAUGE) lin trend was ",
round(EstT.full.lintrend,3), ' C°/year with p=', round(EstT.full.pvalNCV_ECall,10), sep=" ")) +
round(EstT.full.lintrend,3), 'C°/year with p=', round(EstT.full.pvalNCV_ECall,10), sep=" ")) +
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab("Estuary Temperature [C°]") + xlab("Time")
ylab("Estuary Temperature [C°]") + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
p1Est2 <- ggplot(EstT.full.df, aes(y=EstTmean, x=index(EstT.TS))) + geom_line(alpha=0.5) +
ggtitle(paste(Estuary,'@', Est.Gauge.loc, " - Linear and smooth trend in Estuary temperature (GAUGE) lin trend was ",
round(EstT.full.lintrend,3), 'C°/year with p=', round(EstT.full.pvalNCV_ECall,10), sep=" ")) +
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
#stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
binomial_smooth(formula = y ~ splines::ns(x, 2)) +
ylab("Estuary Temperature [C°]") + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
png.name <- paste(Output.Directory, Estuary, '@', Est.Gauge.loc, '_Trends_estTmean_full_period_', Sys.Date(),"nosmoothb3.png", sep="")
png(file = png.name, width = 10.5, height = 4, units='in', res=500)
grid.arrange(p1Est2,ncol=1)
dev.off()
gA1 <- ggplotGrob(p1riv)
gA2 <- ggplotGrob(p1sst)
gA1$widths <- gA2$widths
#export to png
png.name <- paste(Output.Directory, Estuary, '@', Riv.Gauge.loc, '_Trends_tasmean_full_period_', Sys.Date(),".png", sep="")
png(file = png.name, width = 10.5, height = 7, units='in', res=500)
@ -271,7 +319,11 @@ grid.arrange(p1air,ncol=1)
dev.off()
png.name <- paste(Output.Directory, Estuary, '@', Riv.Gauge.loc, '_Trends_rivTmean_full_period_', Sys.Date(),".png", sep="")
png(file = png.name, width = 10.5, height = 7, units='in', res=500)
grid.arrange(p1riv,ncol=1)
grid.arrange(p1riv,p1riv,ncol=1)
dev.off()
png.name <- paste(Output.Directory, Estuary, '@', Riv.Gauge.loc, '_Trends_rivTmean_SSTmean_full_period4_', Sys.Date(),".png", sep="")
png(file = png.name, width = 10.5, height = 7, units='in', res=500)
grid.arrange(gA1,gA2,ncol=1)
dev.off()
png.name <- paste(Output.Directory, Estuary, '@', Riv.Gauge.loc, '_Trends_RivQmean_full_period_', Sys.Date(),".png", sep="")
png(file = png.name, width = 10.5, height = 7, units='in', res=500)
@ -279,12 +331,18 @@ grid.arrange(p1rivQ,ncol=1)
dev.off()
png.name <- paste(Output.Directory, Estuary, '@', Riv.Gauge.loc, '_Trends_SSTmean_full_period_', Sys.Date(),".png", sep="")
png(file = png.name, width = 10.5, height = 7, units='in', res=500)
grid.arrange(p1sst,ncol=1)
grid.arrange(p1sst,p1sst,ncol=1)
dev.off()
png.name <- paste(Output.Directory, Estuary, '@', Est.Gauge.loc, '_Trends_estTmean_full_period_', Sys.Date(),".png", sep="")
png(file = png.name, width = 10.5, height = 7, units='in', res=500)
png.name <- paste(Output.Directory, Estuary, '@', Est.Gauge.loc, '_Trends_estTmean_full_period_', Sys.Date(),"b.png", sep="")
png(file = png.name, width = 10.5, height = 4, units='in', res=500)
grid.arrange(p1Est,ncol=1)
dev.off()
png.name <- paste(Output.Directory, Estuary, '@', Est.Gauge.loc, '_Trends_estTmean_full_period_', Sys.Date(),"nosmoothb.png", sep="")
png(file = png.name, width = 10.5, height = 4, units='in', res=500)
grid.arrange(p1Est2,ncol=1)
dev.off()
##################################### Full Time Period
@ -300,7 +358,9 @@ p2air <- ggplot(combined.df, aes(y=tasmean, x=index(combined.TS))) + geom_line(a
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab("Air Temperature [C°]") + xlab("Time")
ylab("Air Temperature [C°]") + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
#Riv temp
p2riv <- ggplot(combined.df, aes(y=rivTmean, x=index(combined.TS))) + geom_line(alpha=0.5) +
@ -309,7 +369,9 @@ p2riv <- ggplot(combined.df, aes(y=rivTmean, x=index(combined.TS))) + geom_line(
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab("River Temperature [C°]") + xlab("Time")
ylab("River Temperature [C°]") + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
#Riv flow
if(logtransformFlow ==TRUE){
@ -319,7 +381,9 @@ if(logtransformFlow ==TRUE){
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab(expression(paste("log10(River Flow [ML/day] + 2)", sep=""))) + xlab("Time")
ylab(expression(paste("log10(River Flow [ML/day] + 2)", sep=""))) + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
} else {
p2rivQ <- ggplot(combined.df, aes(y=rivQmean, x=index(combined.TS))) + geom_line(alpha=0.5) +
ggtitle(paste(Estuary,'@', Riv.Gauge.loc, " - Linear and smooth trend in river flow (GAUGE) lin trend was ",
@ -327,7 +391,9 @@ if(logtransformFlow ==TRUE){
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab(expression(paste("River Flow [ML/day]", sep=""))) + xlab("Time")
ylab(expression(paste("River Flow [ML/day]", sep=""))) + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
}
#Sea temp
@ -337,7 +403,9 @@ p2sst <- ggplot(combined.df, aes(y=SSTmean, x=index(combined.TS))) + geom_line(a
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab("Sea Surface Temperature [C°]") + xlab("Time")
ylab("Sea Surface Temperature [C°]") + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
#sst temp
p2Est <- ggplot(combined.df, aes(y=EstTmean, x=index(combined.TS))) + geom_line(alpha=0.5) +
@ -346,7 +414,9 @@ p2Est <- ggplot(combined.df, aes(y=EstTmean, x=index(combined.TS))) + geom_line(
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
#stat_smooth(method=gam, formula=y~s(x, k=ggplotGAM.k), se=T, size=0.5) +
ylab("Estuary Temperature [C°]") + xlab("Time")
ylab("Estuary Temperature [C°]") + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
#export to png
png.name <- paste(Output.Directory, Estuary, '@', Riv.Gauge.loc, '_Trends_tasmean_1990-present_', Sys.Date(),".png", sep="")
@ -382,6 +452,11 @@ png(file = png.name, width = 10.5, height = 7, units='in', res=500)
grid.arrange(gA,gB ,gC,ncol=1)
dev.off()
png.name <- paste(Output.Directory, Estuary, '@', Riv.Gauge.loc, '_Trends_SST_RivT_1990-present_', Sys.Date(),".png", sep="")
png(file = png.name, width = 10.5, height = 7, units='in', res=500)
grid.arrange(gB,gC, ncol=1)
dev.off()
png.name <- paste(Output.Directory, Estuary, '@', Riv.Gauge.loc, '_Trends_SST_RivT_EstT_1990-present_', Sys.Date(),".png", sep="")
png(file = png.name, width = 10.5, height = 7, units='in', res=500)
grid.arrange(gB,gD,gC,ncol=1)
@ -413,3 +488,4 @@ dev.off()

@ -19,7 +19,7 @@ setwd("C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdoc/P
#Set inputs
######################
Case.Study <- "CASESTUDY2"
Estuary <- "RICHMOND"
Estuary <- "GEORGES"
Climvar <- 'tasmean'
ggplotGAM.k <- 7
######################
@ -43,7 +43,7 @@ pattern = paste(Estuary, '.*.csv', sep="")
Buoy_CSV_Path <- list.files("./Data/Ocean_Data/Waves_SST/", pattern, full.names=T)
Buoy.name <- substr(list.files("./Data/Ocean_Data/Waves_SST/", pattern, full.names=F), nchar(Estuary)+2, nchar(Estuary)+7)
SST.df <- data.frame(read.csv(AirT_CSV_Path, colClasses = "character"))
SST.df <- data.frame(read.csv(Buoy_CSV_Path, colClasses = "character"))
colnames(SST.df) <- as.character(SST.df[9,])
SST.df = SST.df[-c(1:9), ]
@ -95,6 +95,13 @@ SST.full.df$Julday1 <- seq(1,length(SST.full.df[,1]),1)
linear.trend.model_EC_all <- lm(SSTmean ~ Julday1, SST.full.df)
SST.pvalNCV_ECall <- summary(linear.trend.model_EC_all )$coefficients[2,4]
SST.lintrend <- summary(linear.trend.model_EC_all )$coefficients[2,1] * 365
#export interpolated data as csv
csv.name <- paste("C:/Users/z5025317/OneDrive - UNSW/WRL_Postdoc_Manual_Backup/WRL_Postdoc/Projects/Paper#1/Data/Ocean_Data/Waves_SST/Processed/",Estuary , "_SST_Daily.csv", sep="")
write.csv(SST.full.df, csv.name)
######################
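The blocks above fit lm(SSTmean ~ Julday1) and multiply the slope by 365 to report C°/year; the short Python sketch below does the same with scipy.stats.linregress on a synthetic daily series (a tooling swap shown for illustration, not the script's own method).

import numpy as np
import pandas as pd
from scipy import stats

# synthetic daily SST series standing in for SST.full.df
idx = pd.date_range('1992-01-01', '2018-01-01', freq='D')
sst = pd.Series(21 + 0.00005 * np.arange(len(idx)) + np.random.normal(0, 0.5, len(idx)), index=idx)

julday = np.arange(1, len(sst) + 1)            # same role as SST.full.df$Julday1
slope, intercept, r, pval, stderr = stats.linregress(julday, sst.values)
print('linear trend: %.4f C/year (p = %.5f)' % (slope * 365, pval))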
@ -109,7 +116,10 @@ p1air <- ggplot(Hsig.full.df, aes(y=Hsigmean, x=index(Hsig.full.TS))) + geom_lin
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=4), se=T, size=0.5, col="red") +
ylab("Significant Wave Height [m]") + xlab("Time")
ylab("Significant Wave Height [m]") + xlab("Time") +
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
#export to png
png.name <- paste(Output.Directory, '/Trends_Significant_Wave_Height_',Buoy.name, '_full_period_', Sys.Date(),".png", sep="")
@ -125,7 +135,10 @@ p1air <- ggplot(Hsig.full.df, aes(y=Hsigmean, x=index(Hsig.full.TS))) + geom_lin
stat_smooth(method=gam, formula=y~s(x, k=13), se=T, size=0.5, col="red") +
#stat_smooth(method=gam, formula=y~s(x, k=8), se=T, size=0.5, cor="blue") +
stat_smooth(method=gam, formula=y~s(x, k=5), se=T, size=0.5, col="green") +
ylab("Significant Wave Height [m]") + xlab("Time")
ylab("Significant Wave Height [m]") + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
#export to png
png.name <- paste(Output.Directory, '/Trends_Significant_Wave_Height_',Buoy.name, '_MultiGAM_full_period_', Sys.Date(),".png", sep="")
@ -140,7 +153,10 @@ p1air <- ggplot(SST.full.df, aes(y=SSTmean, x=index(SST.full.TS))) + geom_line(a
theme(plot.title=element_text(face="bold", size=9)) +
geom_smooth(method='lm',fill="green", formula=y~x, colour="darkgreen", size = 0.5) +
stat_smooth(method=gam, formula=y~s(x, k=4), se=T, size=0.5, col="red") +
ylab("Sea Surface Temperature [C°]") + xlab("Time")
ylab("Sea Surface Temperature [C°]") + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
#export to png
png.name <- paste(Output.Directory, '/Trends_SST_',Buoy.name, '_full_period_', Sys.Date(),".png", sep="")
@ -156,7 +172,10 @@ p1air <- ggplot(SST.full.df, aes(y=SSTmean, x=index(SST.full.TS))) + geom_line(a
stat_smooth(method=gam, formula=y~s(x, k=13), se=T, size=0.5, col="red") +
#stat_smooth(method=gam, formula=y~s(x, k=8), se=T, size=0.5, cor="blue") +
stat_smooth(method=gam, formula=y~s(x, k=5), se=T, size=0.5, col="green") +
ylab("Sea Surface Temperature [C°]") + xlab("Time")
ylab("Sea Surface Temperature [C°]") + xlab("Time")+
theme(axis.text=element_text(size=Fontsize)) +
theme(panel.grid.major.x = element_blank() ,panel.grid.minor.x = element_blank(), panel.grid.major.y = element_line( size=.1, color="white" ))
#export to png
png.name <- paste(Output.Directory, '/Trends_SST_',Buoy.name, '_MultiGAM_full_period_', Sys.Date(),".png", sep="")
