Skip to content

Commit c7fd5d5

Browse files
authored
Merge pull request #236 from UW-Hydro/develop
Develop
2 parents d805d05 + c1f2744 commit c7fd5d5

File tree

5 files changed

+77
-173
lines changed

5 files changed

+77
-173
lines changed

README.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -145,6 +145,11 @@ Or in BibTeX:
145145
}
146146
```
147147

148+
Acknowledgements
149+
================
150+
MetSim has greatly benefited from the user community, who have contributed code, tested features, provided feedback, and helped with documentation.
151+
We would like to thank and acknowledge the work of Ted Bohn, Andy Wood, Kristen Whitney, Yifan Cheng, Liz Clark, Oriana Chegwidden, Ethan Gutmann, Kostas Andreadis, Thomas Remke, Ed Maurer, and Philipp Sommer for their help.
152+
148153

149154
References
150155
==========

docs/whats-new.rst

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,17 @@ What's New
55

66
.. _whats-new.2.3.0:
77

8+
v2.3.2
9+
------
10+
11+
Bug fixes
12+
~~~~~~~~~
13+
- Fix a bug in ASCII input reading due to a pandas change
14+
15+
Enhancements
16+
~~~~~~~~~~~~
17+
- Drastically speed up PITRI precipitation disaggregation
18+
819
v2.3.1
920
------
1021

@@ -15,7 +26,7 @@ Bug fixes
1526

1627
v2.3.0
1728
------
18-
Enchancements
29+
Enhancements
1930
~~~~~~~~~~~~~
2031
- Allow for variable renaming in INI configuration files.
2132
- Added capability for new YAML configuration file format

metsim/disaggregate.py

Lines changed: 47 additions & 116 deletions
Original file line numberDiff line numberDiff line change
@@ -107,9 +107,8 @@ def disaggregate(df_daily: pd.DataFrame, params: dict,
107107
df_disagg['longwave'] = longwave(
108108
df_disagg['temp'].values, df_disagg['vapor_pressure'].values,
109109
df_disagg['tskc'].values, params)
110-
111-
df_disagg['prec'] = prec(df_daily['prec'].values, df_daily['t_min'].values,
112-
ts, params, df_daily.index.month)
110+
df_disagg['prec'] = prec(df_daily['prec'], df_daily['t_min'], ts, params,
111+
df_daily.get('t_pk'), df_daily.get('dur'))
113112

114113
if 'wind' in df_daily:
115114
df_disagg['wind'] = wind(df_daily['wind'].values, ts, params)
@@ -252,7 +251,7 @@ def temp(t_min: np.array, t_max: np.array, out_len: int,
252251

253252

254253
def prec(prec: pd.Series, t_min: pd.Series, ts: float, params: dict,
255-
month_of_year: int):
254+
t_pk=None, dur=None):
256255
"""
257256
Distributes sub-daily precipitation either evenly (uniform) or with a
258257
triangular (triangle) distribution, depending upon the chosen method.
@@ -281,127 +280,59 @@ def prec(prec: pd.Series, t_min: pd.Series, ts: float, params: dict,
281280
prec:
282281
A sub-daily timeseries of precipitation. [mm]
283282
"""
284-
def prec_TRIANGLE(prec: pd.Series, t_min: pd.Series,
285-
month_of_year: int, do_mix: bool, params: dict):
286-
287-
def P_kernel(t_corners, m, t):
288-
# calculating precipitation intensity of current time t
289-
t_start = t_corners[0]
290-
t_pk = t_corners[1]
291-
t_end = t_corners[2]
292-
if (t < t_start) | (t > t_end):
293-
P = 0
294-
elif t <= t_pk:
295-
P = m * (t - t_start)
296-
elif t >= t_pk:
297-
P = m * (t_end - t)
298-
return P
299-
300-
dur = params['dur']
301-
t_pk = params['t_pk']
302-
n_days = len(prec)
303-
steps_per_day = cnst.MIN_PER_HOUR * cnst.HOURS_PER_DAY
304-
offset = np.ceil(steps_per_day / 2)
305-
output_length = int(steps_per_day * n_days)
306-
index = np.arange(output_length)
307-
steps_per_two_days = int(((np.ceil(steps_per_day / 2)) * 2) +
308-
steps_per_day)
309-
# time of step on next day [minutes]
310-
t_next = 2 * cnst.MIN_PER_DAY + 2
311-
P_return = pd.Series(np.zeros(output_length, dtype='float'),
312-
index=index)
313-
314-
# create kernel of unit hyetographs, one for each month
315-
kernels = np.zeros(shape=(12, steps_per_two_days))
316-
317-
for month in np.arange(12, dtype=int):
318-
# Calculating the unit hyetograph (kernels) with area of 1 mm
319-
320-
# Computing monthly constants
321-
P_pk = 2. / dur[month] # peak precipitation intensity
322-
m = P_pk / (dur[month] / 2.) # slope of precipitation rising limb
323-
# time of storm start [minutes]
324-
t_start = (t_pk[month] - (dur[month] / 2.))
325-
# time of storm end [minutes]
326-
t_end = (t_pk[month] + (dur[month] / 2.))
327-
# time step of storm start
328-
i_start = int(np.floor(t_start) + offset)
329-
# time step of storm end
330-
i_end = int(np.floor(t_end) + offset)
331-
332-
# Initializing timestep variables
333-
i = i_start # current timestep
334-
t = i_start # current time [minutes]
335-
t_0 = t # start time of current timestep [minutes]
336-
# times of key corners of unit hyetograph
337-
t_corners = [t_start, t_pk[month], t_end, t_next] + offset
338-
c = 0 # current corner index
339-
# end time of current timestep [minutes]
340-
t_1 = min(t_0, t_corners[c])
341-
area = 0 # area under curve for current timestep
342-
343-
# Looping through kernel timesteps
344-
while i <= i_end:
345-
P_0 = P_kernel(t_corners, m, t_0)
346-
P_1 = P_kernel(t_corners, m, t_1)
347-
area += 0.5 * (P_0 + P_1) * (t_1 - t_0)
348-
if t_1 == t: # end of timestep check
349-
kernels[month, i] = area
350-
area = 0
351-
t += 1
352-
i += 1
353-
if t_1 == t_corners[c]:
354-
c += 1
355-
t_0 = t_1
356-
t_1 = min(t, t_corners[c])
357-
358-
# Loop through each rain day of the timeseries and apply the kernel for
359-
# the appropriate month of year
360-
rain_days = np.asarray(np.where(prec > 0))[0]
361-
if len(rain_days) > 0:
362-
for d in rain_days:
363-
mon = month_of_year[d] - 1
364-
if do_mix and t_min[d] < 0:
365-
i0 = d * steps_per_day
366-
i1 = i0 + steps_per_day
367-
P_return[i0:i1] += prec[d] * 1 / steps_per_day
368-
elif d == 0:
369-
# beginning of kernel is clipped;
370-
# rescale so that clipped kernel sums to original total
371-
k0 = int(np.ceil(steps_per_day / 2))
372-
i1 = int(np.ceil(steps_per_day * 1.5))
373-
P_return[:i1] += (prec[d] / sum(kernels[mon, k0:])
374-
* kernels[mon, k0:])
375-
elif d == (n_days - 1):
376-
# end of kernel is clipped;
377-
# rescale so that clipped kernel sums to original total
378-
k1 = int(np.ceil(steps_per_day * 1.5))
379-
i0 = int(np.floor((d - 0.5) * steps_per_day))
380-
P_return[i0:] += (prec[d] / sum(kernels[mon, :k1])
381-
* kernels[mon, :k1])
382-
else:
383-
i0 = int(np.floor((d - 0.5) * steps_per_day))
384-
i1 = int(i0 + (2 * steps_per_day))
385-
P_return[i0:i1] += prec[d] * kernels[mon]
386-
P_return = np.around(P_return, decimals=5)
387-
return P_return.values
283+
#def prec_TRIANGLE(prec: pd.Series, t_min: pd.Series,
284+
# month_of_year: int, do_mix: bool, params: dict):
285+
def prec_TRIANGLE(daily_prec, t_min, prec_peak, prec_dur, ts, do_mix):
286+
'''Triangular disaggregation'''
287+
ts_per_day = int(cnst.HOURS_PER_DAY * cnst.MIN_PER_HOUR)
288+
n_days = len(daily_prec)
289+
# Scale to desired timestep
290+
#prec_peak /= ts
291+
#prec_dur /= ts
292+
disagg_prec = np.zeros(int(n_days*ts_per_day))
293+
294+
# Loop over days
295+
for i, (t, P) in enumerate(daily_prec.iteritems()):
296+
times_day = np.arange(ts_per_day)
297+
prec_day = np.zeros(ts_per_day)
298+
299+
# Look up climatology
300+
t_pk = prec_peak[t]
301+
t_dur = prec_dur[t]
302+
303+
# Rising and falling time
304+
t_start = t_pk - 0.5 * t_dur
305+
t_stop = t_pk + 0.5 * t_dur
306+
t_plus = times_day[
307+
np.logical_and(times_day <= t_pk, times_day >= t_start)]
308+
t_minus = times_day[
309+
np.logical_and(times_day >= t_pk, times_day <= t_stop)]
310+
311+
# Begin with relative intensity
312+
prec_day[t_plus] = np.linspace(0, 1.0, len(t_plus))
313+
prec_day[t_minus] = np.linspace(1.0, 0, len(t_minus))
314+
315+
# Scale to input precipitation
316+
prec_day = (P / np.sum(prec_day)) * prec_day
317+
disagg_prec[i*ts_per_day:(i+1)*ts_per_day] = prec_day
318+
return disagg_prec
388319

389320
def prec_UNIFORM(prec: pd.Series, *args):
390-
return np.repeat(prec / cnst.MIN_PER_DAY, cnst.MIN_PER_DAY)
321+
return np.repeat(prec.values / cnst.MIN_PER_DAY, cnst.MIN_PER_DAY)
391322

392323
prec_function = {
393324
'UNIFORM': prec_UNIFORM,
394325
'TRIANGLE': prec_TRIANGLE,
395326
'MIX': prec_TRIANGLE,
396327
}
397-
if params['prec_type'].upper() == 'MIX':
398-
do_mix = True
328+
if params['prec_type'].upper() in ['TRIANGLE', 'MIX']:
329+
if params['prec_type'].upper() == 'MIX':
330+
do_mix = True
331+
else:
332+
do_mix = False
333+
P_return = prec_TRIANGLE(prec, t_min, t_pk, dur, ts, do_mix)
399334
else:
400-
do_mix = False
401-
402-
P_return = prec_function[params['prec_type'].upper()](prec, t_min,
403-
month_of_year,
404-
do_mix, params)
335+
P_return = prec_UNIFORM(prec)
405336

406337
if params['utc_offset']:
407338
offset = int(((params["lon"] / cnst.DEG_PER_REV) * cnst.MIN_PER_DAY))

metsim/io.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -254,7 +254,7 @@ def read_ascii(data_handle, domain=None, is_worker=False,
254254
var_dict=None) -> xr.Dataset:
255255
"""Read in an ascii forcing file"""
256256
dates = date_range(start, stop, calendar=calendar)
257-
names = var_dict.keys()
257+
names = list(var_dict.keys())
258258
ds = pd.read_csv(data_handle, header=None, delim_whitespace=True,
259259
names=names).head(len(dates))
260260
ds.index = dates

metsim/metsim.py

Lines changed: 12 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -152,6 +152,7 @@ class MetSim(object):
152152
"utc_offset": False,
153153
"lw_cloud": 'cloud_deardorff',
154154
"lw_type": 'prata',
155+
"prec_type": 'uniform',
155156
"tdew_tol": 1e-6,
156157
"tmax_daylength_fraction": 0.67,
157158
"rain_scalar": 0.75,
@@ -235,7 +236,6 @@ def _update_unit_attrs(self, out_vars):
235236
v['units'] = available_outputs[k]['units']
236237

237238
def _validate_force_times(self, force_times):
238-
239239
for p, i in [('start', 0), ('stop', -1)]:
240240
# infer times from force_times
241241
if isinstance(self.params[p], str):
@@ -270,6 +270,14 @@ def _validate_force_times(self, force_times):
270270
'long_name': 'local time at grid location',
271271
'standard_name': 'local_time'}
272272

273+
def convert_monthly_param(self, name):
274+
self.met_data[name] = self.met_data['prec'].copy()
275+
months = self.met_data['time'].dt.month
276+
for m in range(12):
277+
param = self.domain[name].sel(month=m)
278+
locations = {'time': self.met_data['time'].isel(time=months == m)}
279+
self.met_data[name].loc[locations] = param
280+
273281
@property
274282
def domain(self):
275283
if self._domain is None:
@@ -458,14 +466,12 @@ def run_slice(self):
458466
params['tday_coef'] = float(params['tday_coef'])
459467
params['tmax_daylength_fraction'] = float(params['tmax_daylength_fraction'])
460468
params['lapse_rate'] = float(params['lapse_rate'])
469+
if self.params['prec_type'].upper() in ['TRIANGLE', 'MIX']:
470+
self.convert_monthly_param('dur')
471+
self.convert_monthly_param('t_pk')
461472
for index, mask_val in np.ndenumerate(self.domain['mask'].values):
462473
if mask_val > 0:
463474
locs = {d: i for d, i in zip(self.domain['mask'].dims, index)}
464-
if self.params['prec_type'].upper() in ['TRIANGLE', 'MIX']:
465-
# add variables for triangle precipitation disgregation
466-
# method to parameters
467-
params['dur'], params['t_pk'] = (
468-
add_prec_tri_vars(self.domain.isel(**locs)))
469475
else:
470476
continue
471477

@@ -826,52 +832,3 @@ def __exit__(self, *args):
826832
@property
827833
def locked(self):
828834
return False
829-
830-
831-
def add_prec_tri_vars(domain):
832-
"""
833-
Check that variables for triangle precipitation method exist and have
834-
values that are within allowable ranges. Return these variables.
835-
836-
Parameters
837-
----------
838-
domain:
839-
Dataset of domain variables for given location.
840-
841-
Returns
842-
-------
843-
dur
844-
Array of climatological monthly storm durations. [minutes]
845-
t_pk
846-
Array of climatological monthly times to storm peaks. [minutes]
847-
"""
848-
# Check that variables exist
849-
try:
850-
dur = domain['dur']
851-
except Exception as e:
852-
raise e("Storm duration and time to peak values are "
853-
"required in the domain file for the triangle "
854-
"preciptation disagregation method.")
855-
try:
856-
t_pk = domain['t_pk']
857-
except Exception as e:
858-
raise e("Storm duration and time to peak values are "
859-
"required in the domain file for the triangle "
860-
"preciptation disagregation method.")
861-
862-
# Check that variable values are within allowable ranges.
863-
day_length = cnst.MIN_PER_HOUR * cnst.HOURS_PER_DAY
864-
dur_zero_test = dur <= 0
865-
dur_day_test = dur > day_length
866-
if dur_zero_test.any() or dur_day_test.any():
867-
raise ValueError('Storm duration must be greater than 0 and less than',
868-
day_length, '(i.e. the day length in minutes)')
869-
870-
t_pk_zero_test = t_pk < 0
871-
t_pk_day_test = t_pk > day_length
872-
if t_pk_zero_test.any() or t_pk_day_test.any():
873-
raise ValueError('Storm time to peak must be greater than or equal to '
874-
'0, and less than', day_length,
875-
'(i.e. the end of a day)')
876-
877-
return dur, t_pk

0 commit comments

Comments
 (0)