Beautify with yapf

master
Dan Howe 3 years ago
parent bb78c46ee7
commit 640b0d8061

@@ -51,13 +51,14 @@ def parse_args():
the parsed input arguments in a dict
"""
parser = argparse.ArgumentParser(usage=__doc__)
parser.add_argument(
'-f', '--file', help='name of parameter file', default=None)
parser.add_argument(
'-a',
'--all',
help='process all *.yaml files in folder',
action='store_true')
parser.add_argument('-f',
'--file',
help='name of parameter file',
default=None)
parser.add_argument('-a',
'--all',
help='process all *.yaml files in folder',
action='store_true')
return parser.parse_args()
@@ -189,22 +190,22 @@ def get_ongoing_recession(n_runs, start_year, end_year, sea_level_rise,
n_years = len(years)
# Interpolate sea level rise projections (m)
slr_mode = np.interp(
years, xp=sea_level_rise['year'],
fp=sea_level_rise['mode'])[:, np.newaxis]
slr_mode = np.interp(years,
xp=sea_level_rise['year'],
fp=sea_level_rise['mode'])[:, np.newaxis]
try:
slr_min = np.interp(
years, xp=sea_level_rise['year'],
fp=sea_level_rise['min'])[:, np.newaxis]
slr_min = np.interp(years,
xp=sea_level_rise['year'],
fp=sea_level_rise['min'])[:, np.newaxis]
except ValueError:
# Use mode for deterministic beaches
slr_min = slr_mode
try:
slr_max = np.interp(
years, xp=sea_level_rise['year'],
fp=sea_level_rise['max'])[:, np.newaxis]
slr_max = np.interp(years,
xp=sea_level_rise['year'],
fp=sea_level_rise['max'])[:, np.newaxis]
except ValueError:
# Use mode for deterministic beaches
slr_max = slr_mode
@@ -215,11 +216,10 @@ def get_ongoing_recession(n_runs, start_year, end_year, sea_level_rise,
for i in range(n_years):
# Use triangular distribution for SLR in each year (m)
try:
slr[i, :] = np.random.triangular(
left=slr_min[i],
mode=slr_mode[i],
right=slr_max[i],
size=n_runs)
slr[i, :] = np.random.triangular(left=slr_min[i],
mode=slr_mode[i],
right=slr_max[i],
size=n_runs)
except ValueError:
# Use constant value if slr_min == slr_max
slr[i, :] = np.ones([1, n_runs]) * slr_mode[i]
@@ -236,11 +236,10 @@ def get_ongoing_recession(n_runs, start_year, end_year, sea_level_rise,
# Simulate probabilistic Bruun factors (-)
if (bruun_factor['min'] < bruun_factor['mode'] < bruun_factor['max']):
# Use probabilistic method if min and max are provided
bruun_factor = np.random.triangular(
left=bruun_factor['min'],
mode=bruun_factor['mode'],
right=bruun_factor['max'],
size=n_runs)
bruun_factor = np.random.triangular(left=bruun_factor['min'],
mode=bruun_factor['mode'],
right=bruun_factor['max'],
size=n_runs)
else:
# Ensure values were not given in reverse order
if bruun_factor['min'] > bruun_factor['mode']:
@@ -412,8 +411,10 @@ def process(beach_name, beach_scenario, n_runs, start_year, end_year,
if probabilistic:
for i in range(len(years)):
# Generate synthetic storm demands for each year
storm_demand_volume[i, :] = get_storm_demand_volume(
ref_aep, ref_vol, n=n_runs, mode='fit')
storm_demand_volume[i, :] = get_storm_demand_volume(ref_aep,
ref_vol,
n=n_runs,
mode='fit')
else:
# Get storm demand for 1% AEP event
sort_idx = np.argsort(ref_aep)
@@ -434,22 +435,21 @@ def process(beach_name, beach_scenario, n_runs, start_year, end_year,
col_names = [c for c in df_in.columns if c.isdigit()]
# Loop through profiles
pbar_profile = tqdm(
df_in[df_in['beach'] == beach_name].iterrows(),
total=df_in[df_in['beach'] == beach_name].shape[0])
pbar_profile = tqdm(df_in[df_in['beach'] == beach_name].iterrows(),
total=df_in[df_in['beach'] == beach_name].shape[0])
for i, prof in pbar_profile:
pbar_profile.set_description(('Block: {}, profile: {}'.format(
prof['block'], prof['profile'])))
pbar_profile.set_description(
('Block: {}, profile: {}'.format(prof['block'],
prof['profile'])))
# Convert storm demand volume to a profile chainage (m)
profile_volume = np.array([int(c) for c in col_names])
profile_chainage = np.array(prof[col_names], dtype=float)
valid_idx = np.isfinite(profile_chainage)
storm_demand_chainage = np.interp(
storm_demand_volume,
xp=profile_volume[valid_idx],
fp=profile_chainage[valid_idx])
storm_demand_chainage = np.interp(storm_demand_volume,
xp=profile_volume[valid_idx],
fp=profile_chainage[valid_idx])
# Loop through years
pbar_year = tqdm(output_years)
@@ -465,31 +465,31 @@ def process(beach_name, beach_scenario, n_runs, start_year, end_year,
for b, p, ch in zip(min_chainage['block'],
min_chainage['profile'],
min_chainage['chainage']):
idx = (df_out['block'] == str(b)) & (
df_out['profile'] == p)
idx = (df_out['block'] == str(b)) & (df_out['profile']
== p)
df_out.loc[idx, 'min_chainage'] = ch
# Specify which segments to break
df_out = df_out.assign(segment_gaps=False)
for b, p, in zip(segment_gaps['block'],
segment_gaps['profile']):
idx = (df_out['block'] == str(b)) & (
df_out['profile'] == p)
idx = (df_out['block'] == str(b)) & (df_out['profile']
== p)
df_out.loc[idx, 'segment_gaps'] = True
# Specify which profiles to plot
df_out = df_out.assign(plot_stats=False)
for b, p in zip(plot_stats['block'], plot_stats['profile']):
idx = (df_out['block'] == str(b)) & (
df_out['profile'] == p)
idx = (df_out['block'] == str(b)) & (df_out['profile']
== p)
df_out.loc[idx, 'plot_stats'] = True
# Specify which profiles to omit from shapefiles
df_out = df_out.assign(omit_from_shp=False)
for b, p in zip(omit_from_shp['block'],
omit_from_shp['profile']):
idx = (df_out['block'] == str(b)) & (
df_out['profile'] == p)
idx = (df_out['block'] == str(b)) & (df_out['profile']
== p)
df_out.loc[idx, 'omit_from_shp'] = True
# Specify additional points to be included in shapefiles
@@ -497,9 +497,9 @@ def process(beach_name, beach_scenario, n_runs, start_year, end_year,
for b, p, x, y in zip(insert_points['block'],
insert_points['profile'],
insert_points['x'], insert_points['y']):
idx = np.where((df_out['block'] == str(b)) & (
df_out['profile'] == p) & (
df_out['beach'] == beach_name))[0][0]
idx = np.where((df_out['block'] == str(b))
& (df_out['profile'] == p)
& (df_out['beach'] == beach_name))[0][0]
if not df_out.loc[idx, 'insert_points']:
df_out.loc[idx, 'insert_points'] = []
@@ -509,9 +509,9 @@ def process(beach_name, beach_scenario, n_runs, start_year, end_year,
for b, p, x, y in zip(append_points['block'],
append_points['profile'],
append_points['x'], append_points['y']):
idx = np.where((df_out['block'] == str(b)) & (
df_out['profile'] == p) & (
df_out['beach'] == beach_name))[0][0]
idx = np.where((df_out['block'] == str(b))
& (df_out['profile'] == p)
& (df_out['beach'] == beach_name))[0][0]
if not df_out.loc[idx, 'append_points']:
df_out.loc[idx, 'append_points'] = []
@@ -568,20 +568,20 @@ def process(beach_name, beach_scenario, n_runs, start_year, end_year,
df_csv = df_out[df_out['beach'] == beach_name]
# Save values for current beach
csv_name = os.path.join(output_folder, '{} {} {}.csv'.format(
beach_scenario, year, profile_type))
csv_name = os.path.join(
output_folder,
'{} {} {}.csv'.format(beach_scenario, year, profile_type))
# Write header for csv file on first iteration
if df_out[df_out['beach'] == beach_name].index[0] == i:
df_csv.loc[[], :].to_csv(csv_name, index=False)
# Append data for current profile
df_csv[df_csv.index == i].to_csv(
csv_name,
mode='a',
index=False,
header=False,
float_format='%g')
df_csv[df_csv.index == i].to_csv(csv_name,
mode='a',
index=False,
header=False,
float_format='%g')
pbar_year.close()
pbar_profile.close()

Loading…
Cancel
Save