Commit 41d1216

Merge pull request #15532 from cxp484/FireX
FireX: Merge with firemodels/master
2 parents b50d718 + c23b00c commit 41d1216

29 files changed: +254 -543 lines changed

Manuals/FDS_Verification_Guide/FDS_Verification_Guide.tex

Lines changed: 1 addition & 1 deletion
@@ -4123,7 +4123,7 @@ \section{Combustion Load Balancing (\ct{comb\_load\_bal})}
 \includegraphics[height=2.15in]{SCRIPT_FIGURES/EDC_load_bal_Methane_Smooke_TOT} &
 \includegraphics[height=2.15in]{SCRIPT_FIGURES/EDC_load_bal_Methane_Smooke_DEVC}
 \end{tabular*}
-\caption[Results of the \ct{comb\_load\_balance} test cases]{Combustion load balance case for mixing with detailed chemistry using Methane\_Smooke detailed chemical mechanism. The initial unmixed fraction is 1.0 and mixing time is 0.01 s.}
+\caption[Results of the \ct{comb\_load\_balance} test cases]{Combustion load balance case for mixing with detailed chemistry using Methane\_Smooke detailed chemical mechanism. The initial unmixed fraction is 0.98.}
 \label{fig:EDC_load_bal_methane_smooke}
 \end{figure}
 

Utilities/Matlab/FDS_verification_dataplot_inputs.csv

Lines changed: 74 additions & 74 deletions
Large diffs are not rendered by default.

Utilities/Python/fdsplotlib.py

Lines changed: 126 additions & 246 deletions
Large diffs are not rendered by default.
Utilities/Python/scripts/Crown_Fires.py

Lines changed: 8 additions & 16 deletions
@@ -1,51 +1,43 @@
-"""
-McGrattan
-10-29-2019
-Crown_Fires.py
 
-Read the Crown_Fires *_cat_devc.csv files and determine the rate of spread based on the time history of front position.
-Write the results to a file that will be plotted via dataplot.m.
-"""
+# Read the Crown_Fires *_cat_devc.csv files and determine the rate of spread based on the time history of front position.
+# Write the results to a file that will be plotted via dataplot.py.
 
 import numpy as np
 import pandas as pd
 import os
 
-# Parameters
 outdir = '../../../out/Crown_Fires/'
 
-# Get only *_cat_devc.csv files (like MATLAB dir)
 file_list = [f for f in os.listdir(outdir) if f.endswith('_cat_devc.csv')]
-file_list.sort() # optional, to keep consistent ordering with MATLAB
+file_list.sort()
 
 wind_speed = []
 slope = []
 
 for fname in file_list:
     full_path = os.path.join(outdir, fname)
 
-    # Read CSV (skip 2 header rows like MATLAB importdata)
-    M = pd.read_csv(full_path, skiprows=2)
+    M = pd.read_csv(full_path, skiprows=2, header=None)
     M_data = M.to_numpy()
 
-    # Extract rows satisfying conditions (700<=col2<=900, 30<col1<300)
+    # Extract rows satisfying conditions (700<=x<=900, 30<Time<300)
     indices = np.where(
         (M_data[:, 1] >= 700) &
         (M_data[:, 1] <= 900) &
         (M_data[:, 0] > 30) &
         (M_data[:, 0] < 300)
     )[0]
 
-    # Mean wind speed (col3)
+    # Mean wind speed (U10)
     wind_speed.append(np.mean(M_data[indices, 2]))
 
-    # Polyfit slope for col2 vs col1
+    # Polyfit slope for x vs Time
    p = np.polyfit(M_data[indices, 0], M_data[indices, 1], 1)
     slope.append(p[0])
 
-# Write output file
 with open(os.path.join(outdir, 'ROS.csv'), 'w') as fid:
     fid.write('km/h,m/min\n')
     fid.write('U,ROS\n')
     for u, s in zip(wind_speed, slope):
         fid.write(f'{3.6*u:4.1f},{60*s:6.2f}\n')
+
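A note on the calculation kept by this diff: the rate of spread is simply the slope of a first-order polynomial fit of fire-front position against time, restricted to the 700-900 m and 30-300 s window, with the wind speed averaged over the same rows. A minimal sketch of that calculation on synthetic arrays (the numbers below are made up for illustration, not taken from the Crown_Fires cases):

import numpy as np

time      = np.linspace(40.0, 280.0, 50)   # s, synthetic sample times
front_pos = 700.0 + 0.9*time               # m, synthetic front position (true ROS = 0.9 m/s)
wind      = np.full_like(time, 5.0)        # m/s, synthetic wind-speed column

# Same filtering window as the script: 700 <= x <= 900 and 30 < Time < 300
mask = (front_pos >= 700) & (front_pos <= 900) & (time > 30) & (time < 300)

slope, _ = np.polyfit(time[mask], front_pos[mask], 1)   # slope = ROS in m/s

print(f'U   = {3.6*np.mean(wind[mask]):4.1f} km/h')     # m/s -> km/h, as in ROS.csv
print(f'ROS = {60*slope:6.2f} m/min')                   # m/s -> m/min, as in ROS.csv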

Utilities/Python/scripts/FHWA_Tunnel.py

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,6 @@
 
+# For each experiment, make a contour plot of the extent of a single temperature contour at each time during the experiment
+
 import numpy as np
 import pandas as pd
 import matplotlib.pyplot as plt
@@ -21,8 +23,6 @@
 single_level = [50]
 setpoint = [10000, 400, 399, 338, 240, 322, 390, 420, 360, 10000, 10000]
 
-# For each experiment, make a contour plot of the extent of a single temperature contour at each time during the experiment
-
 for k in range(11): # Experiments
 
     fig = fdsplotlib.plot_to_fig(x_data=[5.5,5.5], y_data=[0,15], marker_style='k--',
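The comment moved to the top of this file describes tracing the extent of a single temperature contour over time (note single_level = [50] in the context lines). As a rough illustration of that idea only, not of the actual FHWA_Tunnel data or of the fdsplotlib API, a single-level matplotlib contour of a synthetic temperature field looks like this; the axes and field values are assumptions:

import numpy as np
import matplotlib.pyplot as plt

# Synthetic temperature field on a position-time grid (made up for illustration)
x = np.linspace(0.0, 15.0, 200)       # assumed position axis
t = np.linspace(0.0, 300.0, 200)      # assumed time axis
X, T = np.meshgrid(x, t)
temp = 20.0 + 400.0*np.exp(-(X - 5.5)**2/4.0)*(T/300.0)

fig, ax = plt.subplots()
ax.contour(X, T, temp, levels=[50], colors='k')   # single contour level, as in single_level = [50]
ax.plot([5.5, 5.5], [0, 300], 'k--')              # dashed reference line, analogous to the plot_to_fig call
ax.set_xlabel('position (assumed)')
ax.set_ylabel('time (assumed)')
plt.show()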

Utilities/Python/scripts/Heskestad_Flame_Height_2.py

Lines changed: 2 additions & 0 deletions
@@ -1,4 +1,6 @@
 
+# Plot results of the Heskestad_Flame_Height cases against various experimental correlations
+
 import numpy as np
 import matplotlib.pyplot as plt
 import pandas as pd
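For context on the added comment: the best-known of these experimental correlations is Heskestad's mean flame height, L_f = 0.235 Q^(2/5) - 1.02 D, with L_f and D in m and Q (heat release rate) in kW. The diff does not show which correlations the script actually plots, so the snippet below is only a reference sketch of that one formula:

def heskestad_flame_height(Q_kW, D_m):
    """Heskestad mean flame height (m): L_f = 0.235*Q**(2/5) - 1.02*D."""
    return 0.235*Q_kW**0.4 - 1.02*D_m

print(heskestad_flame_height(1000.0, 1.0))   # ~2.7 m for a 1 MW fire on a 1 m diameter burner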

Utilities/Python/scripts/ashrae_7.py

Lines changed: 0 additions & 1 deletion
@@ -77,4 +77,3 @@
 fid.write('\\end{tabular}\n')
 fid.write('\\end{center}\n')
 
-print(f"\n? Successfully generated LaTeX table in {texname}")

Utilities/Python/scripts/burke_schumann.py

Lines changed: 0 additions & 2 deletions
@@ -160,7 +160,6 @@
 # Create DataFrame for easy CSV writing with header
 df_expected = pd.DataFrame(burke_expected, columns=header1_expected)
 df_expected.to_csv(base_dir+'burke_schumann_expected.csv', index=False)
-print(f"Written expected data to burke_schumann_expected.csv")
 
 # 2. Write FDS Data
 burke_FDS = np.zeros((len(mix_frac[0, :]), 5)) # 15 rows, 5 columns
@@ -177,4 +176,3 @@
 # Create DataFrame for easy CSV writing with header
 df_FDS = pd.DataFrame(burke_FDS, columns=header1_FDS)
 df_FDS.to_csv(base_dir+'burke_schumann_FDS.csv', index=False)
-print(f"Written FDS data to burke_schumann_FDS.csv")

Utilities/Python/scripts/cat_propane_depo.py

Lines changed: 0 additions & 3 deletions
@@ -70,7 +70,6 @@
 D1_final = pd.concat([H1_df, D1]).reset_index(drop=True)
 
 D1_final.to_csv(outdir+'propane_flame_deposition_cat_wall.csv', header=False, index=False)
-print(f"Written condensed phase aerosol data to propane_flame_deposition_cat_wall.csv")
 
 # List of files for gas phase aerosol (mass loss)
 gas_files = [
@@ -132,7 +131,6 @@
 
 # Write to CSV
 D2_final.to_csv(outdir+'propane_flame_deposition_cat_gas.csv', header=False, index=False)
-print(f"Written gas phase aerosol data to propane_flame_deposition_cat_gas.csv")
 
 # Create DataFrame D3
 D3 = pd.DataFrame({
@@ -156,4 +154,3 @@
 
 # Write to CSV
 D3_final.to_csv(outdir+ 'propane_flame_deposition_cat_total.csv', header=False, index=False)
-print(f"Written total aerosol data to propane_flame_deposition_cat_total.csv")

Utilities/Python/scripts/flame_species.py

Lines changed: 0 additions & 9 deletions
@@ -49,20 +49,13 @@
 
 header_lp = tuple(h_mf_p.iloc[0,:]) + tuple(h_mf_l.iloc[0,1:])
 
-print(header_lp)
-
 data_combined_lp = pd.concat([mf_p, mf_l.iloc[:, 1:4]], axis=1)
 data_combined_lp.columns = header_lp
 
-print(data_combined_lp)
-
 header_ml = tuple(h_mf_p_2.iloc[0,:]) + tuple(h_mf_l_f.iloc[0,1:3]) + tuple(h_mf_l_o.iloc[0,1:3])
-print(header_ml)
 data_combined_ml = pd.concat([mf_p_2, mf_l_f.iloc[:, 1:3], mf_l_o.iloc[:, 1:3]], axis=1)
 data_combined_ml.columns = header_ml
 
-print(data_combined_ml)
-
 output_file_1 = outdir+'methane_flame_lumpedprimitive.csv'
 
 # The MATLAB script uses a hardcoded units line for the final output.
@@ -76,7 +69,6 @@
 
 # Write to CSV without the pandas index or automatically generated header
 df_out_1.to_csv(output_file_1, header=False, index=False)
-print(f"Successfully wrote combined data to {output_file_1}")
 
 output_file_2 = outdir+'methane_flame_multilumped.csv'
 
@@ -88,4 +80,3 @@
 
 # Write to CSV without the pandas index or automatically generated header
 df_out_2.to_csv(output_file_2, header=False, index=False)
-print(f"Successfully wrote multi-lumped data to {output_file_2}")
