diff --git a/probabilistic-analysis/probabilistic_assessment.py b/probabilistic-analysis/probabilistic_assessment.py
index 83c1ec9..59ae0cd 100644
--- a/probabilistic-analysis/probabilistic_assessment.py
+++ b/probabilistic-analysis/probabilistic_assessment.py
@@ -464,6 +464,13 @@ def process(beach_name, beach_scenario, n_runs, start_year, end_year,
                                figsize=(16, 24),
                                sharey='row')
 
+        # Check whether to save probabilistic diagnostics
+        output_diagnostics = False  # Reset flag for each profile
+        for _, bp in pd.DataFrame(diagnostics).iterrows():
+            if ((str(prof['block']) == str(bp['block']))
+                    and (prof['profile'] == bp['profile'])):
+                output_diagnostics = True
+
         # Loop through years
         pbar_year = tqdm(output_years, leave=False)
         for j, year in enumerate(pbar_year):
@@ -595,103 +602,102 @@
                           header=False,
                           float_format='%g')
 
-        # Check whether to save probabilistic diagnostics
-        for _, bp in pd.DataFrame(diagnostics).iterrows():
-            if not ((str(prof['block']) == str(bp['block'])) and
-                    (prof['profile'] == bp['profile'])):
-                continue  # Don't save
-
-            # Save probabilistic diagnostics
-            year_idx = year == years
-
-            # Find index where most extreme event occurred
-            event_year_idx = chainage_with_recession.argmin(axis=0)
-
-            # define dummy index
-            ix = np.arange(n_runs)
-            dump_data = {
-                'Sea level rise (m)':
-                slr[event_year_idx, ix].ravel(),
-                'Bruun factor (-)':
-                bf[event_year_idx, ix].ravel(),
-                'Bruun factor x SLR (m)':
-                slr[event_year_idx, ix].ravel() *
-                bf[event_year_idx, ix].ravel(),
-                'Underlying trend rate (m/yr)':
-                ur_rate[year_idx, :].ravel(),
-                'Underlying trend (m)':
-                ur[event_year_idx, ix].ravel(),
-                'Underlying + SLR (m)':
-                r[event_year_idx, ix].ravel(),
-                'Total movement (m)':
-                (storm_demand_dist + r)[event_year_idx, ix].ravel(),
-                'Storm demand distance (m)':
-                storm_demand_dist[event_year_idx, ix].ravel(),
-                'Storm demand volume (m3/m)':
-                storm_demand_volume[event_year_idx, ix].ravel(),
-            }
-
-            dump_df = pd.DataFrame(dump_data)
-            dump_df['Run ID'] = np.arange(len(event_year_idx)) + 1
-            dump_df['Event year'] = years[event_year_idx]
-            dump_df['Years elapsed'] = event_year_idx + 1
-
-            # Reorder columns
-            dump_df = dump_df[[
-                'Run ID',
-                'Event year',
-                'Years elapsed',
-                'Sea level rise (m)',
-                'Bruun factor (-)',
-                'Bruun factor x SLR (m)',
-                'Underlying trend rate (m/yr)',
-                'Underlying trend (m)',
-                'Underlying + SLR (m)',
-                'Total movement (m)',
-                'Storm demand distance (m)',
-                'Storm demand volume (m3/m)',
-            ]]
-
-            # Sort based on maximum movement
-            dump_df = dump_df.sort_values('Total movement (m)',
-                                          ascending=False)
-
-            # Add encounter probabilities
-            dump_df['Encounter probability (%)'] = np.linspace(0,
-                                                               100,
-                                                               num=n_runs +
-                                                               2)[1:-1]
-            dump_df = dump_df.set_index('Encounter probability (%)')
-
-            csv_name = os.path.join(
+            if output_diagnostics:
+                # Save probabilistic diagnostics
+                year_idx = year == years
+
+                # Find index where most extreme event occurred
+                event_year_idx = chainage_with_recession.argmin(axis=0)
+
+                # Index over runs, used to pick each run's extreme event
+                ix = np.arange(n_runs)
+                dump_data = {
+                    'Sea level rise (m)':
+                    slr[event_year_idx, ix].ravel(),
+                    'Bruun factor (-)':
+                    bf[event_year_idx, ix].ravel(),
+                    'Bruun factor x SLR (m)':
+                    slr[event_year_idx, ix].ravel() *
+                    bf[event_year_idx, ix].ravel(),
+                    'Underlying trend rate (m/yr)':
+                    ur_rate[year_idx, :].ravel(),
+                    'Underlying trend (m)':
+                    ur[event_year_idx, ix].ravel(),
+                    'Underlying + SLR (m)':
+                    r[event_year_idx, ix].ravel(),
+                    'Total movement (m)':
+                    (storm_demand_dist + r)[event_year_idx, ix].ravel(),
+                    'Storm demand distance (m)':
+                    storm_demand_dist[event_year_idx, ix].ravel(),
+                    'Storm demand volume (m3/m)':
+                    storm_demand_volume[event_year_idx, ix].ravel(),
+                }
+
+                dump_df = pd.DataFrame(dump_data)
+                dump_df['Run ID'] = np.arange(len(event_year_idx)) + 1
+                dump_df['Event year'] = years[event_year_idx]
+                dump_df['Years elapsed'] = event_year_idx + 1
+
+                # Reorder columns
+                dump_df = dump_df[[
+                    'Run ID',
+                    'Event year',
+                    'Years elapsed',
+                    'Sea level rise (m)',
+                    'Bruun factor (-)',
+                    'Bruun factor x SLR (m)',
+                    'Underlying trend rate (m/yr)',
+                    'Underlying trend (m)',
+                    'Underlying + SLR (m)',
+                    'Total movement (m)',
+                    'Storm demand distance (m)',
+                    'Storm demand volume (m3/m)',
+                ]]
+
+                # Sort based on maximum movement
+                dump_df = dump_df.sort_values('Total movement (m)',
+                                              ascending=False)
+
+                # Add encounter probabilities
+                dump_df['Encounter probability (%)'] = np.linspace(
+                    0, 100, num=n_runs + 2)[1:-1]
+                dump_df = dump_df.set_index('Encounter probability (%)')
+
+                csv_name = os.path.join(
+                    'diagnostics',
+                    '{} {} {}.csv'.format(beach_scenario, year,
+                                          profile_type))
+                dump_df.to_csv(csv_name, float_format='%g')
+
+                for i, c in enumerate(dump_df.columns[3:]):
+                    ax[i, j].plot(dump_df.index,
+                                  dump_df[c],
+                                  '.',
+                                  color='#666666',
+                                  markersize=2)
+                    ax[i, j].spines['right'].set_visible(False)
+                    ax[i, j].spines['top'].set_visible(False)
+                    if j == 0:
+                        ax[i, 0].yaxis.set_label_coords(-0.4, 0.5)
+                        label = c.replace('(', '\n(')
+                        ax[i, 0].set_ylabel(label,
+                                            va='top',
+                                            linespacing=1.5)
+
+                ax[i, j].set_xlabel('Encounter probability (%)',
+                                    labelpad=10)
+                ax[0, j].set_title(year)
+
+        fig.suptitle('{}, block {}, profile {}'.format(
+            beach_scenario, prof['block'], prof['profile']),
+                     y=0.92)
+
+        if output_diagnostics:
+            figname = os.path.join(
                 'diagnostics',
-                '{} {} {}.csv'.format(beach_scenario, year, profile_type))
-            dump_df.to_csv(csv_name, float_format='%g')
-
-            for i, c in enumerate(dump_df.columns[3:]):
-                ax[i, j].plot(dump_df.index,
-                              dump_df[c],
-                              '.',
-                              color='#666666',
-                              markersize=2)
-                ax[i, j].spines['right'].set_visible(False)
-                ax[i, j].spines['top'].set_visible(False)
-                if j == 0:
-                    ax[i, 0].yaxis.set_label_coords(-0.4, 0.5)
-                    label = c.replace('(', '\n(')
-                    ax[i, 0].set_ylabel(label, va='top', linespacing=1.5)
-
-            ax[i, j].set_xlabel('Encounter probability (%)', labelpad=10)
-            ax[0, j].set_title(year)
-
-            fig.suptitle('{}, block {}, profile {}'.format(
-                beach_scenario, prof['block'], prof['profile']),
-                         y=0.92)
-
-            figname = os.path.join(
-                'diagnostics', '{} {}.png'.format(beach_scenario,
-                                                  profile_type))
-            plt.savefig(figname, bbox_inches='tight', dpi=300)
+                f'{beach_scenario} {profile_type} scatter.png')
+            plt.savefig(figname, bbox_inches='tight', dpi=300)
+        plt.close(fig)  # Close even when no diagnostics were saved
 
 
 def main():