
Commit 9b4531a

Merge pull request #10 from GEMScienceTools/slf-generator
Add SLF generation functionality
2 parents a80fffd + fee9e01


177 files changed, +19067 −2033 lines changed

.github/workflows/linux_test.yml (+4)

@@ -40,6 +40,10 @@ jobs:
      - name: Install vmtk Package
        run: |
          source ~/openquake/bin/activate
+         PY_VER=`echo py${{ matrix.python-version }} | tr -d .`
+         echo $PY_VER
+         export PIP_DEFAULT_TIMEOUT=100
+         pip install -r requirements-$PY_VER-linux.txt
          pip install -e .
      - name: Run tests
        run: |

.github/workflows/windows_test.yml (+5 −1)

@@ -22,7 +22,7 @@ jobs:
      fail-fast: false
      matrix:
        os: [windows-2022]
-       python-version: ["3.10", "3.11", "3.12"]
+       python-version: ["3.11", "3.12"]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
@@ -37,6 +37,10 @@ jobs:
        run: |
          Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
          C:\Users\runneradmin\openquake\Scripts\activate.ps1
+         $PY_VER="py${{ matrix.python-version }}"
+         $py = $PY_VER.replace(".","")
+         set PIP_DEFAULT_TIMEOUT=100
+         python -m pip install -r requirements-$py-win64.txt
          pip install -e .
      - name: Run tests
        run: |
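
Note on the two hunks above: both workflows derive the name of the pinned requirements file from the matrix Python version by prepending "py" and dropping the dot. A minimal Python sketch of that transformation (illustration only, not part of the commit):

# Example: matrix.python-version "3.11" selects requirements-py311-<platform>.txt
version = "3.11"                            # example matrix value
py_ver = "py" + version.replace(".", "")    # -> "py311"
print(f"requirements-{py_ver}-linux.txt")   # file used by the linux workflow
print(f"requirements-{py_ver}-win64.txt")   # file used by the windows workflow
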
+305

@@ -0,0 +1,305 @@
import os
import sys
import shutil
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Import the classes necessary for structural analysis
from openquake.vmtk.units import units                 # oq-vmtk units class
from openquake.vmtk.calibration import calibrate_model # oq-vmtk sdof-to-mdof calibration class
from openquake.vmtk.modeller import modeller           # oq-vmtk numerical modelling class
from openquake.vmtk.postprocessor import postprocessor # oq-vmtk postprocessing class
from openquake.vmtk.plotter import plotter             # oq-vmtk plotting class
from openquake.vmtk.utilities import sorted_alphanumeric, import_from_pkl, export_to_pkl # oq-vmtk utility functions

# Define the directory of the ground-motion records
gm_directory = './in/records'

# Define the main output directory
nrha_directory = './out/nltha'
os.makedirs(nrha_directory, exist_ok=True)

# Define the directory for temporary analysis outputs: it stores the temporary .txt files used as acceleration recorders
temp_nrha_directory = os.path.join(nrha_directory, 'temp')
os.makedirs(temp_nrha_directory, exist_ok=True)

# Import the intensity measure dictionary (output from example 1)
ims = import_from_pkl(os.path.join(gm_directory, 'imls_esrm20.pkl'))
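
# Note (added for clarity): "ims" is assumed to be a dictionary keyed by
# intensity measure type (e.g., 'PGA', 'SA(0.3s)', 'AvgSA'), with each entry
# holding one intensity measure level per ground-motion record; the keys are
# used below when looping over the IMTs.
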
# Number of storeys
number_storeys = 2

# Relative floor heights list
floor_heights = [2.80, 2.80]

# First-mode based participation factor
gamma = 1.33

# SDOF capacity (first row: spectral displacement [m] values; second row: spectral acceleration [g] values)
sdof_capacity = np.array([[0.00060789, 0.00486316, 0.02420000, 0.04353684],
                          [0.10315200, 0.20630401, 0.12378241, 0.12502023]]).T
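# After the transpose, sdof_capacity has shape (4, 2): column 0 holds the
# spectral displacements [m] and column 1 the spectral accelerations [g],
# i.e., one row per point of the SDOF capacity curve.
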
63+
# Frame flag
64+
isFrame = False
65+
66+
# Soft-storey mechanism flag
67+
isSOS = False
68+
69+
# Degradation flag
70+
mdof_degradation = True
71+
72+
# Inherent damping
73+
mdof_damping = 0.05
74+
75+
76+
77+
78+
79+
# Intensity measures to use for postprocessing cloud analyses
80+
IMTs = ['PGA', 'SA(0.3s)', 'SA(0.6s)', 'SA(1.0s)','AvgSA']
81+
82+
# Damage thresholds (maximum peak storey drift values in rad)
83+
damage_thresholds = [0.00150, 0.00545, 0.00952, 0.0135]
84+
85+
# The lower limit to be applied for censoring edp values (below 0.1 the minimum threshold for slight damage is considered a negligible case)
86+
lower_limit = 0.1*damage_thresholds[0]
87+
88+
# The upper limit to be applied for consoring edp values (above 1.5 the maximum threshold is considered a collapse case)
89+
censored_limit = 1.5*damage_thresholds[-1]
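# With the thresholds above, lower_limit = 0.1*0.00150 = 0.00015 rad and
# censored_limit = 1.5*0.0135 = 0.02025 rad.
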
# Define consequence model to relate structural damage to a decision variable (i.e., expected loss ratio)
consequence_model = [0.05, 0.20, 0.60, 1.00]  # damage-to-loss ratios

# Calibrate the model using the Lu et al. (2020) method
floor_masses, storey_disps, storey_forces, mdof_phi = calibrate_model(number_storeys, gamma, sdof_capacity, isFrame, isSOS)

print('The mass of each floor (in tonnes):', floor_masses)
print('The first-mode shape used for calibration:', mdof_phi)
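
# Note (added for clarity): calibrate_model returns one storey-level backbone
# curve per storey, so storey_disps and storey_forces are assumed to have
# number_storeys rows; this is what the plotting loop below and the modeller
# class consume.
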
# Plot the capacities to visualise the outcome of the calibration
for i in range(storey_disps.shape[0]):
    plt.plot(np.concatenate(([0.0], storey_disps[i,:])), np.concatenate(([0.0], storey_forces[i,:]*9.81)), label=f'Storey #{i+1}')
plt.plot(np.concatenate(([0.0], sdof_capacity[:,0])), np.concatenate(([0.0], sdof_capacity[:,1]*9.81)), label='SDOF Capacity')
plt.xlabel('Storey Deformation [m]', fontsize=16)
plt.ylabel('Storey Shear [kN]', fontsize=16)
plt.legend(loc='lower right')
plt.grid(visible=True, which='major')
plt.grid(visible=True, which='minor')
plt.xlim([0.00, 0.03])
plt.show()

# Initialise MDOF storage lists
conv_index_list = []          # list for convergence indices
peak_disp_list = []           # list for peak floor displacements (all peak values along the building height)
peak_drift_list = []          # list for peak storey drifts (all peak values along the building height)
peak_accel_list = []          # list for peak floor accelerations (all peak values along the building height)
max_peak_drift_list = []      # list for the maximum peak storey drift (single value per record)
max_peak_drift_dir_list = []  # list for maximum peak storey drift directions
max_peak_drift_loc_list = []  # list for maximum peak storey drift locations
max_peak_accel_list = []      # list for the maximum peak floor acceleration (single value per record)
max_peak_accel_dir_list = []  # list for maximum peak floor acceleration directions
max_peak_accel_loc_list = []  # list for maximum peak floor acceleration locations

# Loop over the ground-motion records, compile the MDOF model and run NLTHA
gmrs = sorted_alphanumeric(os.listdir(os.path.join(gm_directory, 'acc')))  # sort the ground-motion records alphanumerically
dts = sorted_alphanumeric(os.listdir(os.path.join(gm_directory, 'dts')))   # sort the ground-motion time-step files alphanumerically

# Run the analysis
for i in range(len(gmrs)):
    ### Print the current analysis iteration
    print('================================================================')
    print('============== Analysing: {:d} out of {:d} =================='.format(i+1, len(gmrs)))
    print('================================================================')

    ### Compile the MDOF model
    model = modeller(number_storeys,
                     floor_heights,
                     floor_masses,
                     storey_disps,
                     storey_forces*units.g,
                     mdof_degradation)  # initialise the class (build the model)

    model.compile_model()  # compile the MDOF model

    if i == 0:
        model.plot_model()  # visualise the model (only on the first iteration)
    model.do_gravity_analysis()  # do gravity analysis

    if number_storeys == 1:
        num_modes = 1
    else:
        num_modes = 3
    T, phi = model.do_modal_analysis(num_modes=num_modes)  # do modal analysis and get the periods of vibration (essential step before NLTHA)

    ### Define the ground-motion objects
    fnames = [os.path.join(gm_directory, 'acc', gmrs[i])]  # ground-motion record name
    fdts = os.path.join(gm_directory, 'dts', dts[i])       # ground-motion time-step file name
    dts_data = pd.read_csv(fdts, header=None)              # read the time-step file once
    dt_gm = dts_data[0].loc[1] - dts_data[0].loc[0]        # ground-motion time step
    t_max = dts_data[0].iloc[-1]                           # ground-motion duration

    ### Define the analysis parameters and run NLTHA
    dt_ansys = dt_gm  # set the analysis time step
    sf = units.g      # set the scaling factor (records in g must be scaled by 9.81 m/s2 to be consistent with OpenSees)
    control_nodes, conv_index, peak_drift, peak_accel, max_peak_drift, max_peak_drift_dir, max_peak_drift_loc, max_peak_accel, max_peak_accel_dir, max_peak_accel_loc, peak_disp = model.do_nrha_analysis(fnames,
                                                                                                                                                                                                          dt_gm,
                                                                                                                                                                                                          sf,
                                                                                                                                                                                                          t_max,
                                                                                                                                                                                                          dt_ansys,
                                                                                                                                                                                                          temp_nrha_directory,
                                                                                                                                                                                                          pflag=False,
                                                                                                                                                                                                          xi=mdof_damping)

    ### Store the analysis results
    conv_index_list.append(conv_index)
    peak_drift_list.append(peak_drift)
    peak_accel_list.append(peak_accel)
    peak_disp_list.append(peak_disp)
    max_peak_drift_list.append(max_peak_drift)
    max_peak_drift_dir_list.append(max_peak_drift_dir)
    max_peak_drift_loc_list.append(max_peak_drift_loc)
    max_peak_accel_list.append(max_peak_accel)
    max_peak_accel_dir_list.append(max_peak_accel_dir)
    max_peak_accel_loc_list.append(max_peak_accel_loc)

# Remove the temporary directory
shutil.rmtree(temp_nrha_directory)

# Store the analysis results in a dictionary
ansys_dict = {}
labels = ['T', 'control_nodes', 'conv_index_list',
          'peak_drift_list', 'peak_accel_list',
          'max_peak_drift_list', 'max_peak_drift_dir_list',
          'max_peak_drift_loc_list', 'max_peak_accel_list',
          'max_peak_accel_dir_list', 'max_peak_accel_loc_list',
          'peak_disp_list']

for label in labels:
    ansys_dict[label] = vars()[label]  # look each variable up by name in the current namespace;
                                       # note that 'T' and 'control_nodes' hold the values from the last record analysed

# Export the analysis output to a pickle file using the "export_to_pkl" function from "utilities"
export_to_pkl(os.path.join(nrha_directory, 'ansys_out.pkl'), ansys_dict)

print('ANALYSIS COMPLETED!')
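
# Usage note (assumption, not in the original script): in a later session the
# exported results can be reloaded before postprocessing, e.g.
# ansys_dict = import_from_pkl(os.path.join(nrha_directory, 'ansys_out.pkl'))
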
# Initialise the postprocessor class
pp = postprocessor()

# Initialise the plotter class
pl = plotter()

# Loop over the intensity measure types and perform cloud regression to fit the probabilistic seismic demand-capacity model
for current_imt in IMTs:

    # Import the intensity measure levels for the current intensity measure type
    imls = ims[f'{current_imt}']

    # Import the engineering demand parameters (i.e., the maximum peak storey drifts) from the analysis dictionary (processed in example 2)
    edps = ansys_dict['max_peak_drift_list']

    # Process the cloud analysis results using the "do_cloud_analysis" function from "postprocessor";
    # the output is automatically stored in a dictionary
    cloud_dict = pp.do_cloud_analysis(imls,
                                      edps,
                                      damage_thresholds,
                                      lower_limit,
                                      censored_limit)
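
    # Note (added for clarity): among other results, cloud_dict holds the
    # probabilities of exceedance of each damage state ('poes'), which are
    # used further below to derive the vulnerability function.
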
    ## Visualise the cloud analysis results
    pl.plot_cloud_analysis(cloud_dict,
                           output_directory=None,
                           plot_label=f'cloud_analysis_{current_imt}',
                           xlabel=f'{current_imt} [g]',
                           ylabel=r'Maximum Peak Storey Drift, $\theta_{max}$ [%]')  # the y-axis drift values are converted to % automatically by the plotter

    ## Visualise the fragility functions
    pl.plot_fragility_analysis(cloud_dict,
                               output_directory=None,
                               plot_label=f'fragility_{current_imt}',
                               xlabel=f'{current_imt}')

    ## Visualise the seismic demands
    pl.plot_demand_profiles(ansys_dict['peak_drift_list'],
                            ansys_dict['peak_accel_list'],
                            ansys_dict['control_nodes'],
                            output_directory=None,
                            plot_label='seismic_demand_profiles')  # the drift and acceleration values are converted to % and g automatically by the plotter

    ## Visualise the entire set of results using subplots
    pl.plot_ansys_results(cloud_dict,
                          ansys_dict['peak_drift_list'],
                          ansys_dict['peak_accel_list'],
                          ansys_dict['control_nodes'],
                          output_directory=None,
                          plot_label=f'analysis_output_{current_imt}',
                          cloud_xlabel=f'{current_imt}',
                          cloud_ylabel=r'Maximum Peak Storey Drift, $\theta_{max}$ [%]')

# In this example, since the last iteration of the previous loop uses 'AvgSA' as the intensity
# measure, all variables stored inside the "cloud_dict" dictionary correspond to that same IM.
# Hence, the vulnerability function derived here represents the continuous relationship of the
# expected structural loss ratio conditioned on increasing levels of ground shaking, expressed
# in terms of average spectral acceleration (in g).
structural_vulnerability = pp.get_vulnerability_function(cloud_dict['poes'],
                                                         consequence_model,
                                                         uncertainty=True)
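
# Hedged alternative (not in the original script): to target a specific IM
# explicitly rather than relying on the last loop iteration, the cloud
# analysis can be re-run for the desired IM first, e.g.:
# cloud_dict = pp.do_cloud_analysis(ims['AvgSA'],
#                                   ansys_dict['max_peak_drift_list'],
#                                   damage_thresholds,
#                                   lower_limit,
#                                   censored_limit)
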
# Plot the structural vulnerability function
pl.plot_vulnerability_analysis(structural_vulnerability['IMLs'],
                               structural_vulnerability['Loss'],
                               structural_vulnerability['COV'],
                               'AvgSA',
                               'Structural Loss Ratio',
                               output_directory=None,
                               plot_label='Structural Vulnerability')

# The output is a DataFrame with three columns: IMLs (i.e., intensity measure levels), Loss and COV
print(structural_vulnerability)
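
# Optional follow-up (assumption, not in the original script): persist the
# derived vulnerability function alongside the other outputs for later use.
# export_to_pkl(os.path.join(nrha_directory, 'structural_vulnerability.pkl'),
#               structural_vulnerability)
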
0 commit comments