 from openquake.baselib.general import (
     AccumDict, humansize, groupby, block_splitter)
 from openquake.hazardlib.probability_map import ProbabilityMap, get_mean_curve
-from openquake.hazardlib.geo.point import Point
 from openquake.hazardlib.stats import geom_avg_std, compute_stats
 from openquake.hazardlib.calc.stochastic import sample_ruptures
 from openquake.hazardlib.gsim.base import ContextMaker, FarAwayRupture
-from openquake.hazardlib.calc.filters import nofilter, getdefault, SourceFilter
+from openquake.hazardlib.calc.filters import (
+    nofilter, getdefault, get_distances, SourceFilter)
 from openquake.hazardlib.calc.gmf import GmfComputer
 from openquake.hazardlib.calc.conditioned_gmfs import ConditionedGmfComputer
 from openquake.hazardlib import InvalidFile
@@ -296,23 +296,23 @@ def todict(dframe):
     return {k: dframe[k].to_numpy() for k in dframe.columns}


-def filter_stations(station_df, complete, hypo, maxdist):
+def filter_stations(station_df, complete, rup, maxdist):
     """
     :param station_df: DataFrame with the stations
     :param complete: complete SiteCollection
-    :param hypo: hypocenter of the rupture
+    :param rup: rupture
     :param maxdist: maximum distance
     :returns: filtered (station_df, station_sitecol)
     """
     ns = len(station_df)
-    ok = (hypo.distance_to_mesh(complete.mesh) <= maxdist) & numpy.isin(
+    ok = (get_distances(rup, complete, 'rrup') <= maxdist) & numpy.isin(
         complete.sids, station_df.index)
-    close = complete.filter(ok)
-    station_data = station_df[numpy.isin(station_df.index, close.sids)]
+    station_sites = complete.filter(ok)
+    station_data = station_df[numpy.isin(station_df.index, station_sites.sids)]
     if len(station_data) < ns:
         logging.info('Discarded %d/%d stations more distant than %d km',
                      ns - len(station_data), ns, maxdist)
-    return station_data, close
+    return station_data, station_sites


 # NB: save_tmp is passed in event_based_risk
@@ -326,13 +326,15 @@ def starmap_from_rups(func, oq, full_lt, sitecol, dstore, save_tmp=None):
     logging.info('Affected sites = %.1f per rupture', rups['nsites'].mean())
     allproxies = [RuptureProxy(rec) for rec in rups]
     if "station_data" in oq.inputs:
+        trt = full_lt.trts[0]
         proxy = allproxies[0]
+        proxy.geom = dstore['rupgeoms'][proxy['geom_id']]
+        rup = proxy.to_ebr(trt).rupture
         station_df = dstore.read_df('station_data', 'site_id')
         maxdist = (oq.maximum_distance_stations or
                    oq.maximum_distance['default'][-1][1])
-        hypo = Point(*proxy['hypo'])
         station_data, station_sites = filter_stations(
-            station_df, sitecol.complete, hypo, maxdist)
+            station_df, sitecol.complete, rup, maxdist)
     else:
         station_data, station_sites = None, None

@@ -341,9 +343,8 @@ def starmap_from_rups(func, oq, full_lt, sitecol, dstore, save_tmp=None):
         oq.concurrent_tasks or 1)
     logging.info('totw = {:_d}'.format(round(totw)))
     if "station_data" in oq.inputs:
-        proxy.geom = dstore['rupgeoms'][proxy['geom_id']]
         rlzs_by_gsim = full_lt.get_rlzs_by_gsim(0)
-        cmaker = ContextMaker(full_lt.trts[0], rlzs_by_gsim, oq)
+        cmaker = ContextMaker(trt, rlzs_by_gsim, oq)
         maxdist = oq.maximum_distance(cmaker.trt)
         srcfilter = SourceFilter(sitecol.complete, maxdist)
         sids = srcfilter.close_sids(proxy, cmaker.trt)
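
Note: as a minimal standalone sketch of the mask logic that the reworked filter_stations builds, the snippet below keeps a site only if its rupture distance is within maxdist and it appears in the station table. It uses plain numpy/pandas stand-ins (sids, distances, station_df and the pga_mean column are invented for illustration), not the engine's SiteCollection, RuptureProxy or get_distances.

# Illustrative sketch only, not engine code; all names are hypothetical.
import numpy
import pandas

sids = numpy.arange(6)                       # site IDs of the "complete" collection
distances = numpy.array([5., 40., 80., 120., 200., 20.])  # rrup-like distances in km
station_df = pandas.DataFrame({'pga_mean': [0.1, 0.2, 0.3]},
                              index=[1, 3, 5])            # stations keyed by site_id
maxdist = 100.0

# keep sites that are both close to the rupture and present in the station table
ok = (distances <= maxdist) & numpy.isin(sids, station_df.index)
station_sites = sids[ok]
station_data = station_df[numpy.isin(station_df.index, station_sites)]
print(station_sites)                                 # -> [1 5]
print(len(station_data), 'stations kept out of', len(station_df))  # -> 2 of 3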