Skip to content

Commit

Permalink
Merge pull request #10423 from gem/losses_by_site
Browse files Browse the repository at this point in the history
Added a `losses_by_site` extractor (i.e. 1000x speedup)
  • Loading branch information
micheles authored Mar 7, 2025
2 parents 6c36004 + 1930dc3 commit 8a209c8
Show file tree
Hide file tree
Showing 6 changed files with 54 additions and 19 deletions.
2 changes: 2 additions & 0 deletions debian/changelog
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
[Michele Simionato]
* Added an extractor `losses_by_site`
* Improved `oq info sources`
* Internal: accepted `source_model_file` in place of
`source_model_logic_tree_file`
* Extended `source_model.serialise_to_nrml` to accept a `mesh_spacing`
Expand Down
15 changes: 15 additions & 0 deletions openquake/calculators/extract.py
Original file line number Diff line number Diff line change
Expand Up @@ -1049,6 +1049,21 @@ def extract_losses_by_asset(dstore, what):
yield 'rlz-000', data


@extract.add('losses_by_site')
def extract_losses_by_site(dstore, what):
    """
    Aggregate the average losses by site over all assets.

    :param dstore: a datastore containing `sitecol`, `assetcol/array`
        and `avg_losses-stats` datasets
    :param what: unused, kept for consistency with the extractor API
    :returns: a DataFrame (lon, lat, number, structural, ...)
    """
    sitecol = dstore['sitecol']
    dic = {'lon': F32(sitecol.lons), 'lat': F32(sitecol.lats)}
    # only the asset->site association is needed for the aggregation;
    # reading a single field avoids materializing the full compound array
    site_id = dstore['assetcol/array']['site_id']
    grp = dstore.getitem('avg_losses-stats')
    for loss_type in grp:
        # column 0 is the first statistic (presumably the mean — the
        # output lon/lat are taken from the site collection, not here)
        losses = grp[loss_type][:, 0]
        dic[loss_type] = F32(general.fast_agg(site_id, losses))
    return pandas.DataFrame(dic)


def _gmf(df, num_sites, imts, sec_imts):
# convert data into the composite array expected by QGIS
gmfa = numpy.zeros(num_sites, [(imt, F32) for imt in imts + sec_imts])
Expand Down
7 changes: 6 additions & 1 deletion openquake/calculators/tests/scenario_risk_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
from openquake.hazardlib.gsim_lt import InvalidLogicTree
from openquake.calculators.tests import (
CalculatorTestCase, ignore_gsd_fields, strip_calc_id)
from openquake.calculators.views import view
from openquake.calculators.views import view, text_table
from openquake.calculators.export import export
from openquake.calculators.extract import extract
from openquake.qa_tests_data.scenario_risk import (
Expand Down Expand Up @@ -185,6 +185,11 @@ def test_case_master(self):
# sensitive to shapely version
self.assertEqualFiles('expected/portfolio_loss.txt', fname, delta=1E-3)

# losses_by_site
df = extract(self.calc.datastore, 'losses_by_site')
fname = gettemp(text_table(df, ext='org'))
self.assertEqualFiles('expected/losses_by_site.org', fname, delta=1E-3)

def test_collapse_gsim_logic_tree(self):
self.run_calc(case_master.__file__, 'job.ini',
collapse_gsim_logic_tree='bs1')
Expand Down
4 changes: 2 additions & 2 deletions openquake/commands/extract.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import os
from openquake.baselib import performance, hdf5
from openquake.baselib import general, performance, hdf5
from openquake.commonlib import logs
from openquake.calculators.extract import Extractor, WebExtractor

Expand Down Expand Up @@ -49,7 +49,7 @@ def main(what,
else: # save as npz
fname = os.path.join(extract_dir, '%s_%d.npz' % (w, calc_id))
hdf5.save_npz(aw, fname)
print('Saved', fname)
print(f'Saved {general.humansize(os.path.getsize(fname))} in {fname}')
if mon.duration > 1:
print(mon)

Expand Down
36 changes: 20 additions & 16 deletions openquake/hazardlib/tests/shakemap/parsers_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,9 @@ def test_1(self):
_rup, _rupdic, err = get_rup_dic(
{'usgs_id': 'usp0001cc', 'approach': 'use_shakemap_from_usgs'},
User(level=2, testdir=''), use_shakemap=True)
self.assertIn('Unable to download from https://earthquake.usgs.gov/fdsnws/'
'event/1/query?eventid=usp0001cc&', err['error_msg'])
self.assertIn(
'Unable to download from https://earthquake.usgs.gov/fdsnws/'
'event/1/query?eventid=usp0001cc&', err['error_msg'])

def test_2(self):
_rup, dic, _err = get_rup_dic(
Expand Down Expand Up @@ -111,8 +112,8 @@ def test_5(self):
{'usgs_id': 'us20002926', 'approach': 'use_shakemap_from_usgs'},
user=user, use_shakemap=True)
self.assertIsNone(rup)
rupture_issue = ('Unable to convert the rupture from the USGS format: at'
' least one surface is not rectangular')
rupture_issue = ('Unable to convert the rupture from the USGS format: '
'at least one surface is not rectangular')
self.assertEqual(dic['rupture_issue'], rupture_issue)

def test_6(self):
Expand All @@ -124,21 +125,23 @@ def test_6(self):
'3 stations were found, but none of them are seismic')

def test_7(self):
dic_in = {'usgs_id': 'us6000jllz', 'lon': None, 'lat': None, 'dep': None,
'mag': None, 'msr': '', 'aspect_ratio': 2, 'rake': None,
'dip': None, 'strike': None, 'approach': 'build_rup_from_usgs'}
dic_in = {
'usgs_id': 'us6000jllz', 'lon': None, 'lat': None, 'dep': None,
'mag': None, 'msr': '', 'aspect_ratio': 2, 'rake': None,
'dip': None, 'strike': None, 'approach': 'build_rup_from_usgs'}
_rup, dic, _err = get_rup_dic(dic_in, user=user, use_shakemap=True)
self.assertEqual(
dic['nodal_planes'],
{'NP1': {'dip': 88.71, 'rake': -179.18, 'strike': 317.63},
'NP2': {'dip': 89.18, 'rake': -1.29, 'strike': 227.61}})

def test_7b(self):
# Case reading nodal planes first from the moment-tensor (not found) then
# falling back to reading them from the focal-mechanism
dic_in = {'usgs_id': 'usp0001ccb', 'lon': None, 'lat': None, 'dep': None,
'mag': None, 'msr': '', 'aspect_ratio': 2, 'rake': None,
'dip': None, 'strike': None, 'approach': 'build_rup_from_usgs'}
# Case reading nodal planes first from the moment-tensor (not found)
# then falling back to reading them from the focal-mechanism
dic_in = {
'usgs_id': 'usp0001ccb', 'lon': None, 'lat': None, 'dep': None,
'mag': None, 'msr': '', 'aspect_ratio': 2, 'rake': None,
'dip': None, 'strike': None, 'approach': 'build_rup_from_usgs'}
_rup, dic, _err = get_rup_dic(dic_in, user=user, use_shakemap=True)
self.assertEqual(
dic['nodal_planes'],
Expand All @@ -155,10 +158,11 @@ def test_8(self):
self.assertAlmostEqual(rup.surface.width, 0.0070800)

def test_9(self):
dic_in = {'usgs_id': 'us6000jllz', 'lon': 37.0143, 'lat': 37.2256, 'dep': 10,
'mag': 7.8, 'msr': 'WC1994', 'aspect_ratio': 3,
'rake': -179.18, 'dip': 88.71, 'strike': 317.63,
'approach': 'build_rup_from_usgs'}
dic_in = {
'usgs_id': 'us6000jllz', 'lon': 37.0143, 'lat': 37.2256, 'dep': 10,
'mag': 7.8, 'msr': 'WC1994', 'aspect_ratio': 3,
'rake': -179.18, 'dip': 88.71, 'strike': 317.63,
'approach': 'build_rup_from_usgs'}
_rup, dic, _err = get_rup_dic(dic_in, user=user, use_shakemap=True)
self.assertEqual(dic['dep'], 10)
self.assertEqual(dic['dip'], 88.71)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
| lon | lat | business_interruption | contents | nonstructural | occupants | structural |
|----------+---------+-----------------------+----------+---------------+-----------+------------|
| -122.57 | 38.1130 | 47.8093 | 279.6 | 418.9 | 0.0014 | 115.3327 |
| -122.114 | 38.1130 | 137.7384 | 2_211 | 3_113 | 0.0040 | 310.4 |
| -122.0 | 37.9100 | 139.4481 | 2_193 | 3_584 | 0.0042 | 478.7 |
| -122.0 | 38.0000 | 510.3 | 4_115 | 10_547 | 0.0146 | 2_142 |
| -122.0 | 38.1130 | 494.7 | 2_817 | 4_080 | 0.0142 | 2_369 |
| -122.0 | 38.2250 | 446.1 | 4_002 | 8_102 | 0.0145 | 1_200 |
| -121.886 | 38.1130 | 145.1879 | 1_036 | 1_347 | 0.0042 | 723.4 |

0 comments on commit 8a209c8

Please sign in to comment.