Skip to content

Commit a526c58

Browse files
committed
[JTH] first binwaves version implemented
1 parent 4d74873 commit a526c58

File tree

4 files changed

+296
-5
lines changed

4 files changed

+296
-5
lines changed

bluemath_tk/waves/binwaves.py

Lines changed: 130 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,130 @@
1+
import numpy as np
2+
import xarray as xr
3+
4+
5+
def transform_CAWCR_WS(ds):
    """
    Adapt a CAWCR wave-spectra dataset to the conventions used by binwaves.

    Renames ``frequency``/``direction`` to ``freq``/``dir``, scales ``efth``
    by pi/180 (degree/radian spectral-density conversion — confirm the
    intended direction of the conversion), and rotates directions by 180
    degrees, wrapping the result into [0, 360).
    """
    renamed = ds.rename({"frequency": "freq", "direction": "dir"})
    renamed["efth"] = renamed["efth"] * np.pi / 180.0
    rotated = renamed["dir"] - 180.0
    renamed["dir"] = np.where(rotated < 0, rotated + 360, rotated)
    return renamed
11+
12+
13+
def process_kp_coefficients(
    swan_ds: xr.Dataset,
    spectrum_freq: np.ndarray,
    spectrum_dir: np.ndarray,
    latitude: float,
    longitude: float,
):
    """
    Compute the propagation (kp) coefficients for one grid location.

    For the SWAN grid point closest to (longitude, latitude), each case's
    (Hsig, 1/TPsmoo, Dir) triplet is mapped onto the nearest bin of the
    target spectrum, building a case x freq x dir coefficient matrix.
    It takes a long time to run but it only needs to be done once per
    location.

    Parameters
    ----------
    swan_ds : xr.Dataset
        Output from the SWAN simulations (dimensions: case_num x Yp x Xp),
        with at least the variables ``Hsig``, ``TPsmoo`` and ``Dir``.
    spectrum_freq : np.ndarray
        Frequency coordinate of the target spectrum.
    spectrum_dir : np.ndarray
        Direction coordinate of the target spectrum.
    latitude : float
        Latitude (Yp) of the point of interest.
    longitude : float
        Longitude (Xp) of the point of interest.

    Returns
    -------
    xr.Dataset
        Dataset with the kp coefficients (case x freq x dir) plus the
        per-case SWAN frequencies and directions.

    Raises
    ------
    ValueError
        If the nearest grid point contains NaN values for ``TPsmoo``.
    """

    # Float accumulator for the coefficients, zero everywhere by default.
    kp_matrix = np.zeros(
        (
            len(swan_ds.case_num),
            len(spectrum_freq),
            len(spectrum_dir),
        )
    )

    swan_point = swan_ds.sel(
        Xp=longitude,
        Yp=latitude,
        method="nearest",
    )
    # TODO: Check if this is the correct way to handle NaN values.
    # .any() (instead of builtin any()) also works for multi-dimensional data.
    if np.isnan(swan_point["TPsmoo"].values).any():
        raise ValueError("NaN values found for variable TPsmoo")

    # Tp mask: keep only cases with a valid (positive) peak period
    swan_point_cut = swan_point[["Hsig", "TPsmoo", "Dir"]].where(
        swan_point["TPsmoo"] > 0,
        drop=True,
    )

    # get k (coefficient), f (frequency) and d (direction) per case
    kfd = xr.Dataset(
        {
            "k": swan_point_cut.Hsig,
            "f": 1 / swan_point_cut.TPsmoo,
            "d": swan_point_cut.Dir,
        }
    )

    # fill kp
    for case_num in kfd.case_num.values:
        kfd_c = kfd.sel(case_num=case_num)

        # get k,f,d and drop entries with undefined frequency
        k = kfd_c.k.values
        f = kfd_c.f.values
        d = kfd_c.d.values

        valid = ~np.isnan(f)
        k, d, f = k[valid], d[valid], f[valid]

        # set case kp at the nearest spectral bin
        for c in range(len(f)):
            i = np.argmin(np.abs(spectrum_freq - f[c]))
            j = np.argmin(np.abs(spectrum_dir - d[c]))
            # NOTE(review): assumes case_num values are 0-based positional
            # indices into the case dimension — confirm against the wrapper.
            kp_matrix[case_num, i, j] = k[c]

    # prepare output dataset
    return xr.Dataset(
        {
            "kp": (["case", "freq", "dir"], kp_matrix),
            "swan_freqs": (["case"], 1.0 / swan_point_cut.TPsmoo),
            "swan_dirs": (["case"], swan_point_cut.Dir),
        },
        coords={
            "case": swan_ds.case_num,
            "freq": spectrum_freq,
            "dir": spectrum_dir,
        },
    )
98+
99+
100+
def reconstruc_spectra(
    spectra_ds: xr.Dataset,
    kp_coeffs: xr.Dataset,
):
    """
    Reconstruct wave spectra at a location from the kp coefficients.

    For every case, the offshore spectral density closest to the case's
    SWAN frequency/direction is scaled by kp**2 and accumulated.

    Parameters
    ----------
    spectra_ds : xr.Dataset
        Offshore spectra dataset with an ``efth`` variable and
        time / freq / dir coordinates.
    kp_coeffs : xr.Dataset
        Coefficients produced by ``process_kp_coefficients``.

    Returns
    -------
    xr.Dataset
        Reconstructed spectra (time x freq x dir).
    """

    # Float accumulator: np.full(shape, 0) would create an integer array.
    EFTH = np.zeros(np.shape(spectra_ds.efth.values))

    # sortby("dir") is loop-invariant — hoist it out of the per-case loop.
    kp_sorted = kp_coeffs.sortby("dir")

    for case in range(len(kp_coeffs.case)):
        freq_ = kp_coeffs.isel(case=case).swan_freqs.values
        dir_ = kp_coeffs.isel(case=case).swan_dirs.values

        # Offshore density at the spectral bin nearest this case's (f, d)
        efth_case = spectra_ds.sel(freq=freq_, dir=dir_, method="nearest")
        kp_case = kp_sorted.isel(case=case)

        EFTH = EFTH + (efth_case.efth * kp_case.kp**2).values

    return xr.Dataset(
        {
            "efth": (["time", "freq", "dir"], EFTH),
        },
        coords={
            "time": spectra_ds.time,
            "freq": spectra_ds.freq,
            "dir": spectra_ds.dir,
        },
    )

bluemath_tk/wrappers/swan/swan_example.py

Lines changed: 22 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
import xarray as xr
2+
23
from bluemath_tk.topo_bathy.swan_grid import generate_grid_parameters
4+
from bluemath_tk.waves.binwaves import process_kp_coefficients, transform_CAWCR_WS
35
from bluemath_tk.wrappers.swan.swan_wrapper import SwanModelWrapper
46

5-
67
# Usage example
78
if __name__ == "__main__":
89
# Load GEBCO bathymetry
@@ -18,7 +19,7 @@
1819
model_parameters = (
1920
xr.open_dataset("/home/tausiaj/GitHub-GeoOcean/BlueMath/test_data/subset.nc")
2021
.to_dataframe()
21-
.iloc[:100]
22+
.iloc[:10]
2223
.to_dict(orient="list")
2324
)
2425
# Create an instance of the SWAN model wrapper
@@ -32,5 +33,22 @@
3233
# List available launchers
3334
print(swan_wrapper.list_available_launchers())
3435
# Run the model
35-
swan_wrapper.run_cases(launcher="docker", parallel=True)
36-
swan_wrapper.run_cases_bulk("sbatch javi_slurm.sh")
36+
# swan_wrapper.run_cases(launcher="docker", parallel=True)
37+
# Post-process the output files
38+
postprocessed_ds = swan_wrapper.postprocess_cases()
39+
print(postprocessed_ds)
40+
# Load spectra example
41+
spectra = xr.open_dataset(
42+
"/home/tausiaj/GitHub-GeoOcean/BlueMath/test_data/Waves_Cantabria_356.08_43.82.nc"
43+
)
44+
spectra_transformed = transform_CAWCR_WS(spectra)
45+
# Extract binwaves kp coeffs
46+
kp_coeffs = process_kp_coefficients(
47+
swan_ds=postprocessed_ds,
48+
spectrum_freq=spectra_transformed.freq.values,
49+
spectrum_dir=spectra_transformed.dir.values,
50+
latitude=43.3,
51+
longitude=173.0,
52+
)
53+
print(kp_coeffs)
54+
# Reconstruct spectra with kp coeffs

bluemath_tk/wrappers/swan/swan_wrapper.py

Lines changed: 143 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
import os
2+
from typing import List
3+
import xarray as xr
4+
import scipy.io as sio
15
from .._base_wrappers import BaseModelWrapper
26

37

@@ -26,6 +30,37 @@ class SwanModelWrapper(BaseModelWrapper):
2630
"docker": "docker run --rm -v .:/case_dir -w /case_dir tausiaj/swan-geoocean:41.51 swanrun -input input",
2731
}
2832

33+
    # Metadata for each variable the SWAN run can emit: maps the SWAN
    # variable name to netCDF-style attributes (long_name, units). Used
    # by postprocess_case / _convert_case_output_files_to_nc.
    output_variables = {
        "Depth": {
            "long_name": "Water depth at the point",
            "units": "m",
        },
        "Hsig": {
            "long_name": "Significant wave height",
            "units": "m",
        },
        "Tm02": {
            "long_name": "Mean wave period",
            "units": "s",
        },
        "Dir": {
            "long_name": "Wave direction",
            "units": "degrees",
        },
        "PkDir": {
            "long_name": "Peak wave direction",
            "units": "degrees",
        },
        "TPsmoo": {
            "long_name": "Peak wave period",
            "units": "s",
        },
        "Dspr": {
            "long_name": "Directional spread",
            "units": "degrees",
        },
    }
63+
2964
def __init__(
3065
self,
3166
templates_dir: str,
@@ -48,3 +83,111 @@ def __init__(
4883
self.set_logger_name(
4984
name=self.__class__.__name__, level="DEBUG" if debug else "INFO"
5085
)
86+
87+
def list_available_output_variables(self) -> List[str]:
88+
"""
89+
List available output variables.
90+
91+
Returns
92+
-------
93+
List[str]
94+
The available output variables.
95+
"""
96+
97+
return list(self.output_variables.keys())
98+
99+
def _convert_case_output_files_to_nc(
100+
self, case_num: int, output_path: str, output_vars: List[str]
101+
) -> xr.Dataset:
102+
"""
103+
Convert mat file to netCDF file.
104+
105+
Parameters
106+
----------
107+
case_num : int
108+
The case number.
109+
output_path : str
110+
The output path.
111+
output_vars : List[str]
112+
The output variables to use.
113+
114+
Returns
115+
-------
116+
xr.Dataset
117+
The xarray Dataset.
118+
"""
119+
120+
# Read mat file
121+
output_dict = sio.loadmat(output_path)
122+
123+
# Create Dataset
124+
ds_output_dict = {var: (("Yp", "Xp"), output_dict[var]) for var in output_vars}
125+
ds = xr.Dataset(
126+
ds_output_dict,
127+
coords={"Xp": output_dict["Xp"][0, :], "Yp": output_dict["Yp"][:, 0]},
128+
)
129+
130+
# assign correct coordinate case_num
131+
ds.coords["case_num"] = case_num
132+
133+
return ds
134+
135+
def postprocess_case(
136+
self, case_num: int, case_dir: str, output_vars: List[str] = None
137+
) -> xr.Dataset:
138+
"""
139+
Convert mat ouput files to netCDF file.
140+
141+
Parameters
142+
----------
143+
case_num : int
144+
The case number.
145+
case_dir : str
146+
The case directory.
147+
output_vars : list, optional
148+
The output variables to postprocess. Default is None.
149+
150+
Returns
151+
-------
152+
xr.Dataset
153+
The postprocessed Dataset.
154+
"""
155+
156+
if output_vars is None:
157+
self.logger.info("Postprocessing all available variables.")
158+
output_vars = list(self.output_variables.keys())
159+
160+
output_nc_path = os.path.join(case_dir, "output.nc")
161+
if not os.path.exists(output_nc_path):
162+
# Convert tab files to netCDF file
163+
output_path = os.path.join(case_dir, "output_main.mat")
164+
output_nc = self._convert_case_output_files_to_nc(
165+
case_num=case_num,
166+
output_path=output_path,
167+
output_vars=output_vars,
168+
)
169+
output_nc.to_netcdf(os.path.join(case_dir, "output.nc"))
170+
else:
171+
self.logger.info("Reading existing output.nc file.")
172+
output_nc = xr.open_dataset(output_nc_path)
173+
174+
return output_nc
175+
176+
def join_postprocessed_files(
177+
self, postprocessed_files: List[xr.Dataset]
178+
) -> xr.Dataset:
179+
"""
180+
Join postprocessed files in a single Dataset.
181+
182+
Parameters
183+
----------
184+
postprocessed_files : list
185+
The postprocessed files.
186+
187+
Returns
188+
-------
189+
xr.Dataset
190+
The joined Dataset.
191+
"""
192+
193+
return xr.concat(postprocessed_files, dim="case_num")

bluemath_tk/wrappers/swash/swash_wrapper.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,7 @@ def _convert_case_output_files_to_nc(
166166
# merge output files to one xarray.Dataset
167167
ds = xr.merge([ds_ouput, ds_run], compat="no_conflicts")
168168

169-
# assign correct coordinate case_id
169+
# assign correct coordinate case_num
170170
ds.coords["case_num"] = case_num
171171

172172
return ds

0 commit comments

Comments
 (0)