diff --git a/probabilistic-analysis/probabilistic_assessment.py b/probabilistic-analysis/probabilistic_assessment.py
index 5549709..7441e19 100644
--- a/probabilistic-analysis/probabilistic_assessment.py
+++ b/probabilistic-analysis/probabilistic_assessment.py
@@ -277,16 +277,20 @@ def get_ongoing_recession(n_runs, start_year, end_year, sea_level_rise,
     # Calculate total underlying recession
     year_factor = np.arange(1, n_years + 1)[:, np.newaxis]
     underlying_recession = underlying_recession_rate * year_factor
+    underlying_recession_rate = np.tile(underlying_recession_rate,
+                                        [n_years, 1])
 
     # Remove probabilistic component from start year
     slr[0, :] = slr[0, :].mean()
     underlying_recession[0, :] = underlying_recession[0, :].mean()
     bruun_factor[0, :] = bruun_factor[0, :].mean()
+    underlying_recession_rate[0, :] = underlying_recession_rate[0, :].mean()
 
     # Calculate total ongoing recession (m)
     ongoing_recession = slr * bruun_factor + underlying_recession
 
-    return ongoing_recession, slr, bruun_factor, underlying_recession
+    return (ongoing_recession, slr, bruun_factor, underlying_recession,
+            underlying_recession_rate)
 
 
 def get_storm_demand_volume(ref_aep, ref_vol, n, mode='fit'):
@@ -397,9 +401,10 @@ def process(beach_name, beach_scenario, n_runs, start_year, end_year,
         probabilistic = True
 
     # Simulate ongoing shoreline recession
-    r, slr, bf, ur = get_ongoing_recession(n_runs, start_year, end_year,
-                                           sea_level_rise, bruun_factor,
-                                           underlying_recession)
+    r, slr, bf, ur, ur_rate = get_ongoing_recession(n_runs, start_year,
+                                                    end_year, sea_level_rise,
+                                                    bruun_factor,
+                                                    underlying_recession)
     ongoing_recession = r.copy()
 
     # Pre-allocate storm demand volume for each year (m3/m)
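
Below is a minimal, self-contained sketch (not part of the patch) of the shape handling the tiling step relies on, assuming `underlying_recession_rate` is sampled as a 1-D array with one rate per model run. Tiling it to `(n_years, n_runs)` gives it the same shape as the cumulative recession array, so its start-year row can be replaced by its mean in the same way as the other probabilistic inputs.

```python
import numpy as np

# Hypothetical sizes and sampled rates, for illustration only
n_runs, n_years = 5, 3
underlying_recession_rate = np.random.default_rng(0).normal(0.1, 0.02, n_runs)

# Cumulative recession: (n_years, 1) * (n_runs,) broadcasts to (n_years, n_runs)
year_factor = np.arange(1, n_years + 1)[:, np.newaxis]
underlying_recession = underlying_recession_rate * year_factor

# Tile the per-run rates across years so the array matches (n_years, n_runs),
# then collapse the start-year row to its mean, mirroring slr and bruun_factor
underlying_recession_rate = np.tile(underlying_recession_rate, [n_years, 1])
underlying_recession_rate[0, :] = underlying_recession_rate[0, :].mean()

assert underlying_recession.shape == underlying_recession_rate.shape
```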