Skip to content

Commit a0cdcbc

Browse files
authored
Merge pull request #105 from GeoOcean/feature/hybeat_apia
Feature/hybeat apia
2 parents 218c17b + e7f7f1b commit a0cdcbc

File tree

1 file changed

+221
-1
lines changed

1 file changed

+221
-1
lines changed

bluemath_tk/wrappers/xbeach/xbeach_wrapper.py

Lines changed: 221 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,11 @@
1+
import math
2+
import os
3+
from typing import List, Union
4+
5+
import numpy as np
6+
import pandas as pd
7+
import xarray as xr
8+
19
from .._base_wrappers import BaseModelWrapper
210

311

@@ -14,7 +22,18 @@ class XBeachModelWrapper(BaseModelWrapper):
1422
The available launchers for the wrapper.
1523
"""
1624

17-
default_parameters = {}
25+
# Default XBeach parameters exposed by the wrapper. Each entry maps a model
# keyword to its expected type, default value, and a human-readable description.
default_parameters = {
    "comptime": {
        "type": int,
        "value": 3600,
        "description": "The computational time.",
    },
    "wbctype": {
        "type": str,
        "value": "off",
        # Fixed copy-pasted description: wbctype selects the wave boundary
        # condition type in XBeach (e.g. "jonstable", "off"); it is not a
        # time step.
        "description": "The wave boundary condition type.",
    },
}
1837

1938
available_launchers = {
2039
"geoocean-cluster": "launchXbeach.sh",
@@ -44,3 +63,204 @@ def __init__(
4463
self.set_logger_name(
4564
name=self.__class__.__name__, level="DEBUG" if debug else "INFO"
4665
)
66+
67+
def build_case(
    self,
    case_context: dict,
    case_dir: str,
) -> None:
    """
    Build the input files for a case.

    For the "jonstable" wave boundary condition, writes one JONSWAP record
    per simulated hour to ``jonswap.txt`` inside the case directory.
    Other ``wbctype`` values produce no files.

    Parameters
    ----------
    case_context : dict
        The case context. Must contain "wbctype" and, for "jonstable",
        "comptime", "Hs", "Tp" and "Dir".
    case_dir : str
        The case directory.
    """

    if case_context["wbctype"] != "jonstable":
        return

    # One record per (possibly partial) hour of computational time.
    n_records = math.ceil(case_context["comptime"] / 3600)
    # Columns: Hs Tp Dir gamma spreading duration timestep
    # (gamma/spreading/duration/timestep are fixed, matching the original).
    record = (
        f"{case_context['Hs']} {case_context['Tp']} {case_context['Dir']} "
        "3.300000 30.000000 3600.000000 1.000000 \n"
    )
    with open(f"{case_dir}/jonswap.txt", "w") as jons_file:
        jons_file.writelines(record for _ in range(n_records))
89+
90+
def _get_average_var(self, case_nc: xr.Dataset, var: str) -> np.ndarray:
    """
    Time-average a variable, skipping the first hour of the simulation.

    Parameters
    ----------
    case_nc : xr.Dataset
        Simulation .nc file.
    var : str
        Variable of interest.

    Returns
    -------
    np.ndarray
        The average value of the variable over the selected time window,
        or None when `var` is not present in the dataset.
    """

    if var not in case_nc:
        return None
    # NOTE(review): the slice stop is the *value* of the last meantime entry
    # cast to int, not the number of time steps — this assumes meantime is
    # hourly and starts at 0; confirm against the model output.
    stop = int(case_nc.meantime.values[-1])
    window = case_nc[var].isel(meantime=slice(1, stop)).values
    return window.mean(axis=0)
114+
115+
def _get_max_var(self, case_nc: xr.Dataset, var: str) -> np.ndarray:
    """
    Take the temporal maximum of a variable, skipping the first hour of the
    simulation.

    Parameters
    ----------
    case_nc : xr.Dataset
        Simulation .nc file.
    var : str
        Variable of interest.

    Returns
    -------
    np.ndarray
        The max value of the variable over the selected time window,
        or None when `var` is not present in the dataset.
    """

    if var not in case_nc:
        return None
    # NOTE(review): same time-window convention as _get_average_var — the
    # slice stop is the last meantime *value* cast to int; confirm that
    # meantime is hourly starting at 0.
    stop = int(case_nc.meantime.values[-1])
    window = case_nc[var].isel(meantime=slice(1, stop)).values
    return window.max(axis=0)
139+
140+
def monitor_cases(self, value_counts: str = None) -> Union[pd.DataFrame, dict]:
    """
    Monitor the cases based on different model log files.

    Each case directory is classified as:
    - "No run": XBlog.txt does not exist yet.
    - "XBerror.txt": a non-empty XBerror.txt exists.
    - "End of run": the end-of-program banner appears in the last log lines.
    - "Running": otherwise.

    Parameters
    ----------
    value_counts : str, optional
        Passed through to the base class aggregation. Default is None.

    Returns
    -------
    Union[pd.DataFrame, dict]
        The aggregated cases status from the base class.
    """

    cases_status = {}

    for case_dir in self.cases_dirs:
        case_dir_name = os.path.basename(case_dir)
        log_path = os.path.join(case_dir, "XBlog.txt")

        if not os.path.exists(log_path):
            cases_status[case_dir_name] = "No run"
            continue

        error_path = os.path.join(case_dir, "XBerror.txt")
        # Fixed: the original used bitwise `&`, which does not short-circuit
        # (os.path.getsize raised FileNotFoundError when XBerror.txt was
        # absent) and, due to precedence, compared `(exists & size) != 0`.
        if os.path.exists(error_path) and os.path.getsize(error_path) != 0:
            cases_status[case_dir_name] = "XBerror.txt"
            continue

        with open(log_path, "r") as f:
            last_lines = f.readlines()[-2:]

        # Fixed: the needle must be lowercase to ever match line.lower();
        # the original "End of program xbeach" could never be found, so
        # finished runs were always reported as "Running".
        if any("end of program xbeach" in line.lower() for line in last_lines):
            cases_status[case_dir_name] = "End of run"
        else:
            cases_status[case_dir_name] = "Running"

    return super().monitor_cases(
        cases_status=cases_status, value_counts=value_counts
    )
174+
175+
def postprocess_case(
    self,
    case_num: int,
    case_dir: str,
    output_vars: List[str] = None,
    overwrite_output: bool = True,
) -> xr.Dataset:
    """
    Convert tab output files to netCDF file.

    Parameters
    ----------
    case_num : int
        The case number.
    case_dir : str
        The case directory.
    output_vars : list, optional
        The output variables to postprocess. Default is None, which keeps
        only the grid coordinates (no extra variables are added).
    overwrite_output : bool, optional
        Overwrite the output.nc file. Default is True.

    Returns
    -------
    xr.Dataset
        The postprocessed Dataset.
    """

    import warnings

    # NOTE(review): this silences warnings for the whole process, not just
    # this call — consider warnings.catch_warnings() if that matters.
    warnings.filterwarnings("ignore")

    self.logger.info(f"[{case_num}]: Postprocessing case {case_num} in {case_dir}.")

    output_nc_path = os.path.join(case_dir, "xboutput_postprocessed.nc")
    if not os.path.exists(output_nc_path) or overwrite_output:
        output_raw = xr.open_dataset(os.path.join(case_dir, "xboutput.nc"))

        globalx = output_raw.globalx.values
        globaly = output_raw.globaly.values
        # Bed level at the first output time step.
        zb = output_raw.zb.values[0]
        y = np.arange(globalx.shape[0])
        x = np.arange(globalx.shape[1])

        ds = xr.Dataset(
            {
                "globalx": (("y", "x"), globalx),
                "globaly": (("y", "x"), globaly),
                "zb": (("y", "x"), zb),
            },
            coords={"y": y, "x": x},
        )

        # Fixed: iterating output_vars raised TypeError when the documented
        # default of None was used; treat None as "no extra variables".
        for var in output_vars or []:
            if var == "zs_max":
                # zs_max keeps the temporal maximum instead of the mean.
                stat = self._get_max_var(case_nc=output_raw, var=var)
            else:
                stat = self._get_average_var(case_nc=output_raw, var=var)
            # Mask cells where zb > 0 (presumably dry/land cells —
            # confirm the sign convention of the bathymetry).
            masked = xr.where(ds["zb"] > 0, np.nan, stat)
            ds[var] = (("y", "x"), masked.data)

        # zb was only needed for masking; drop it from the saved output.
        ds = ds.drop_vars("zb")
        ds.to_netcdf(output_nc_path)

        return ds
    else:
        self.logger.info(
            f"[{case_num}]: Reading existing xboutput_postprocessed.nc file."
        )
        output_nc = xr.open_dataset(output_nc_path)

        return output_nc
248+
249+
def join_postprocessed_files(
    self, postprocessed_files: List[xr.Dataset]
) -> xr.Dataset:
    """
    Concatenate per-case postprocessed Datasets into one Dataset.

    Parameters
    ----------
    postprocessed_files : list
        The postprocessed files, one Dataset per case.

    Returns
    -------
    xr.Dataset
        A single Dataset with the cases stacked along a new "case_num"
        dimension.
    """

    joined = xr.concat(postprocessed_files, dim="case_num")
    return joined

0 commit comments

Comments
 (0)