Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 21 additions & 17 deletions openquake/calculators/event_based.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,13 +188,20 @@ def strip_zeros(gmf_df):
return gmf_df[ok]


def get_computer(cmaker, proxy, sids, complete, station_data, station_sitecol):
def get_computer(cmaker, proxy, rupgeoms, srcfilter,
station_data, station_sitecol):
"""
:returns: GmfComputer or ConditionedGmfComputer
"""
sids = srcfilter.close_sids(proxy, cmaker.trt)
if len(sids) == 0: # filtered away
raise FarAwayRupture

complete = srcfilter.sitecol.complete
proxy.geom = rupgeoms[proxy['geom_id']]
ebr = proxy.to_ebr(cmaker.trt)
oq = cmaker.oq
trt = cmaker.trt
ebr = proxy.to_ebr(trt)

if station_sitecol:
stations = numpy.isin(sids, station_sitecol.sids)
assert stations.sum(), 'There are no stations??'
Expand Down Expand Up @@ -246,6 +253,7 @@ def event_based(proxies, cmaker, stations, dstore, monitor):
times = [] # rup_id, nsites, dt
fmon = monitor('filtering ruptures', measuremem=False)
cmon = monitor('computing gmfs', measuremem=False)
rmon = monitor('reading mea,tau,phi', measuremem=False)
max_iml = oq.get_max_iml()
scenario = 'scenario' in oq.calculation_mode
with dstore:
Expand All @@ -260,22 +268,17 @@ def event_based(proxies, cmaker, stations, dstore, monitor):
with fmon:
if proxy['mag'] < cmaker.min_mag:
continue
sids = srcfilter.close_sids(proxy, cmaker.trt)
if len(sids) == 0: # filtered away
continue
proxy.geom = rupgeoms[proxy['geom_id']]
try:
computer = get_computer(
cmaker, proxy, sids, sitecol.complete, *stations)
cmaker, proxy, rupgeoms, srcfilter, *stations)
except FarAwayRupture:
# skip this rupture
continue
with cmon:
if hasattr(computer, 'station_data'): # conditioned GMFs
assert scenario
df = computer.compute_all(dstore, sig_eps, max_iml)
else: # regular GMFs
df = computer.compute_all(scenario, sig_eps, max_iml)
if hasattr(computer, 'station_data'): # conditioned GMFs
assert scenario
df = computer.compute_all(dstore, sig_eps, max_iml, cmon, rmon)
else: # regular GMFs
df = computer.compute_all(scenario, sig_eps, max_iml, cmon)
dt = time.time() - t0
times.append((proxy['id'], computer.ctx.rrup.min(), dt))
alldata.append(df)
Expand Down Expand Up @@ -326,9 +329,10 @@ def starmap_from_rups(func, oq, full_lt, sitecol, dstore, save_tmp=None):
logging.info('Affected sites = %.1f per rupture', rups['nsites'].mean())
allproxies = [RuptureProxy(rec) for rec in rups]
if "station_data" in oq.inputs:
rupgeoms = dstore['rupgeoms'][:]
trt = full_lt.trts[0]
proxy = allproxies[0]
proxy.geom = dstore['rupgeoms'][proxy['geom_id']]
proxy.geom = rupgeoms[proxy['geom_id']]
rup = proxy.to_ebr(trt).rupture
station_df = dstore.read_df('station_data', 'site_id')
maxdist = (oq.maximum_distance_stations or
Expand All @@ -347,11 +351,11 @@ def starmap_from_rups(func, oq, full_lt, sitecol, dstore, save_tmp=None):
cmaker = ContextMaker(trt, rlzs_by_gsim, oq)
maxdist = oq.maximum_distance(cmaker.trt)
srcfilter = SourceFilter(sitecol.complete, maxdist)
sids = srcfilter.close_sids(proxy, cmaker.trt)
computer = get_computer(
cmaker, proxy, sids, sitecol.complete,
cmaker, proxy, rupgeoms, srcfilter,
station_data, station_sites)
ms, sids = computer.get_ms_and_sids()
del proxy.geom # to reduce data transfer
dstore.create_dset('conditioned/sids', sids)
keys = ['mea', 'sig', 'tau', 'phi']
for g, gsim in enumerate(ms):
Expand Down
42 changes: 25 additions & 17 deletions openquake/hazardlib/calc/conditioned_gmfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,7 @@
import pandas
from openquake.baselib.python3compat import decode
from openquake.baselib.general import AccumDict
from openquake.baselib.performance import Monitor
from openquake.hazardlib import correlation, cross_correlation
from openquake.hazardlib.imt import from_string
from openquake.hazardlib.calc.gmf import GmfComputer, exp
Expand Down Expand Up @@ -221,7 +222,8 @@ def get_ms_and_sids(self):
self.cross_correl_between, self.cross_correl_within,
self.cmaker.maximum_distance)

def compute_all(self, dstore, sig_eps=None, max_iml=None):
def compute_all(self, dstore, sig_eps=None, max_iml=None,
mon1=Monitor(), mon2=Monitor()):
"""
:returns: (dict with fields eid, sid, gmv_X, ...), dt
"""
Expand All @@ -236,9 +238,13 @@ def compute_all(self, dstore, sig_eps=None, max_iml=None):
# NB: ms is a dictionary gsim -> [imt -> array]
sids = dstore['conditioned/sids'][:]
for g, (gsim, rlzs) in enumerate(rlzs_by_gsim.items()):
mean_covs = [dstore['conditioned/gsim_%d/%s' % (g, key)][:]
for key in ('mea', 'sig', 'tau', 'phi')]
array, sig, eps = self.compute(gsim, num_events, mean_covs, rng)
with mon1:
mea = dstore['conditioned/gsim_%d/mea' % g][:]
tau = dstore['conditioned/gsim_%d/tau' % g][:]
phi = dstore['conditioned/gsim_%d/phi' % g][:]
with mon2:
array, sig, eps = self.compute(
gsim, num_events, mea, tau, phi, rng)
M, N, E = array.shape # sig and eps have shapes (M, E) instead
assert len(sids) == N, (len(sids), N)

Expand All @@ -248,7 +254,7 @@ def compute_all(self, dstore, sig_eps=None, max_iml=None):
if (array[m] > max_iml[m]).any():
for n in range(N):
bad = array[m, n] > max_iml[m] # shape E
mean = mean_covs[0][m] # shape (N, 1)
mean = mea[m] # shape (N, 1)
array[m, n, bad] = exp(mean[n, 0], im)

# manage min_iml
Expand Down Expand Up @@ -286,29 +292,31 @@ def compute_all(self, dstore, sig_eps=None, max_iml=None):
n += len(eids)
return pandas.DataFrame(data)

def compute(self, gsim, num_events, mean_covs, rng):
def compute(self, gsim, num_events, mea, tau, phi, rng):
"""
:param gsim: GSIM used to compute mean_stds
:param num_events: the number of seismic events
:param mean_covs: array of shape (4, M, N)
:param mea: array of shape (M, N, 1)
:param tau: array of shape (M, N, N)
:param phi: array of shape (M, N, N)
:returns:
a 32 bit array of shape (num_imts, num_sites, num_events) and
two arrays with shape (num_imts, num_events): sig for tau
and eps for the random part
"""
M = len(self.imts)
num_sids = mean_covs[0][0].size
result = numpy.zeros((M, num_sids, num_events), F32)
M, N, _ = mea.shape
result = numpy.zeros((M, N, num_events), F32)
sig = numpy.zeros((M, num_events), F32) # same for all events
eps = numpy.zeros((M, num_events), F32) # not the same

for m, im in enumerate(self.imts):
mu_Y_yD = mean_covs[0][m]
cov_WY_WY_wD = mean_covs[2][m]
cov_BY_BY_yD = mean_covs[3][m]
for m, im in enumerate(self.cmaker.imtls):
mu_Y_yD = mea[m]
cov_WY_WY_wD = tau[m]
cov_BY_BY_yD = phi[m]
try:
result[m], sig[m], eps[m] = self._compute(
mu_Y_yD, cov_WY_WY_wD, cov_BY_BY_yD, im, num_events, rng)
mu_Y_yD, cov_WY_WY_wD, cov_BY_BY_yD,
im, num_events, rng)
except Exception as exc:
raise RuntimeError(
"(%s, %s, source_id=%r) %s: %s"
Expand Down Expand Up @@ -683,8 +691,8 @@ def _compute_spatial_cross_correlation_matrix(
return spatial_correlation_matrix * cross_corr_coeff


def clip_evals(x, value=0):
    """
    Floor the eigenvalues of a symmetric matrix at *value*.

    :param x: a symmetric (Hermitian) 2D array
    :param value: minimum allowed eigenvalue (default 0)
    :returns: a pair ``(x_new, clipped)`` where ``x_new`` is the matrix
        rebuilt from the floored eigenvalues and ``clipped`` tells whether
        any eigenvalue was actually below *value*
    """
    # eigh is by far the most expensive step here
    eigvals, eigvecs = numpy.linalg.eigh(x)
    clipped = (eigvals < value).any()
    floored = numpy.maximum(eigvals, value)
    # scale each eigenvector column by its (floored) eigenvalue,
    # then recompose: V * diag(w) * V.T
    x_new = (eigvecs * floored) @ eigvecs.T
    return x_new, clipped
Expand Down
18 changes: 11 additions & 7 deletions openquake/hazardlib/calc/gmf.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import pandas

from openquake.baselib.general import AccumDict
from openquake.baselib.performance import Monitor
from openquake.hazardlib.const import StdDev
from openquake.hazardlib.cross_correlation import NoCrossCorrelation
from openquake.hazardlib.gsim.base import ContextMaker, FarAwayRupture
Expand Down Expand Up @@ -51,7 +52,7 @@ def exp(vals, imt):
"""
Exponentiate the values unless the IMT is MMI
"""
if str(imt) == 'MMI':
if imt == 'MMI':
return vals
return numpy.exp(vals)

Expand Down Expand Up @@ -130,7 +131,7 @@ def __init__(self, rupture, sitecol, cmaker, correlation_model=None,
self.cross_correl = cross_correl or NoCrossCorrelation(
cmaker.truncation_level)

def compute_all(self, scenario, sig_eps=None, max_iml=None):
def compute_all(self, scenario, sig_eps=None, max_iml=None, mon=Monitor()):
"""
:returns: DataFrame with fields eid, rlz, sid, gmv_X, ...
"""
Expand All @@ -148,7 +149,8 @@ def compute_all(self, scenario, sig_eps=None, max_iml=None):
# NB: the trick for performance is to keep the call to
# .compute outside of the loop over the realizations;
# it is better to have few calls producing big arrays
array, sig, eps = self.compute(gs, num_events, mean_stds[:, g])
with mon:
array, sig, eps = self.compute(gs, num_events, mean_stds[:, g])
M, N, E = array.shape # sig and eps have shapes (M, E) instead

# manage max_iml
Expand All @@ -157,7 +159,8 @@ def compute_all(self, scenario, sig_eps=None, max_iml=None):
if (array[m] > max_iml[m]).any():
for n in range(N):
bad = array[m, n] > max_iml[m] # shape E
array[m, n, bad] = exp(mean_stds[0, g, m, n], im)
array[m, n, bad] = exp(
mean_stds[0, g, m, n], im)

# manage min_iml
for n in range(N):
Expand Down Expand Up @@ -238,13 +241,14 @@ def compute(self, gsim, num_events, mean_stds):
return result, sig, eps

def _compute(self, mean_stds, imt, gsim, intra_eps, inter_eps):
im = imt.string
if self.cmaker.truncation_level <= 1E-9:
# for truncation_level = 0 there is only mean, no stds
if self.correlation_model:
raise ValueError('truncation_level=0 requires '
'no correlation model')
mean, _, _, _ = mean_stds
gmf = exp(mean, imt)[:, None]
gmf = exp(mean, im)[:, None]
gmf = gmf.repeat(len(inter_eps), axis=1)
inter_sig = 0
elif gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES == {StdDev.TOTAL}:
Expand All @@ -257,7 +261,7 @@ def _compute(self, mean_stds, imt, gsim, intra_eps, inter_eps):
self.correlation_model, gsim)

mean, sig, _, _ = mean_stds
gmf = exp(mean[:, None] + sig[:, None] * intra_eps, imt)
gmf = exp(mean[:, None] + sig[:, None] * intra_eps, im)
inter_sig = numpy.nan
else:
mean, sig, tau, phi = mean_stds
Expand All @@ -274,7 +278,7 @@ def _compute(self, mean_stds, imt, gsim, intra_eps, inter_eps):
intra_res = intra_res[:, None]

inter_res = tau[:, None] * inter_eps # shape (N, 1) * E => (N, E)
gmf = exp(mean[:, None] + intra_res + inter_res, imt) # (N, E)
gmf = exp(mean[:, None] + intra_res + inter_res, im) # (N, E)
inter_sig = tau.max() # from shape (N, 1) => scalar
return gmf, inter_sig, inter_eps # shapes (N, E), 1, E

Expand Down