Skip to content

Commit 30281c7

Browse files
committed
[JTH] warning to info in normalize and hide warnings in estela predictor calculation
1 parent 429b4d2 commit 30281c7

File tree

4 files changed

+25
-3
lines changed

4 files changed

+25
-3
lines changed

bluemath_tk/core/operations.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,7 @@ def normalize(
9595
if custom_scale_factor.get(data_var):
9696
if custom_scale_factor.get(data_var)[0] > data_var_min:
9797
if logger is not None:
98-
logger.warning(
98+
logger.info(
9999
f"Proposed min custom scaler for {data_var} is bigger than datapoint" # , using smallest datapoint
100100
)
101101
else:
@@ -107,7 +107,7 @@ def normalize(
107107
data_var_min = custom_scale_factor.get(data_var)[0]
108108
if custom_scale_factor.get(data_var)[1] < data_var_max:
109109
if logger is not None:
110-
logger.warning(
110+
logger.info(
111111
f"Proposed max custom scaler for {data_var} is lower than datapoint" # , using biggest datapoint
112112
)
113113
else:

bluemath_tk/predictor/xwt.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import logging
12
import warnings
23
from datetime import datetime, timedelta
34
from typing import Any, Dict
@@ -28,6 +29,7 @@ def get_dynamic_estela_predictor(
2829
data: xr.Dataset,
2930
estela: xr.Dataset,
3031
check_interpolation: bool = True,
32+
verbose: bool = False,
3133
) -> xr.Dataset:
3234
"""
3335
Transform an xarray dataset of longitude, latitude, and time into one where
@@ -45,13 +47,29 @@ def get_dynamic_estela_predictor(
4547
The dataset containing the F values with dimensions longitude and latitude.
4648
check_interpolation : bool, optional
4749
Whether to check if the data is interpolated. Default is True.
50+
verbose : bool, optional
51+
Whether to print verbose output. Default is False.
52+
If False, Dask logs are suppressed.
53+
If True, Dask logs are shown.
4854
4955
Returns
5056
-------
5157
xr.Dataset
5258
The transformed dataset.
5359
"""
5460

61+
if not verbose:
62+
# Suppress Dask logs
63+
logging.getLogger("distributed").setLevel(logging.ERROR)
64+
logging.getLogger("distributed.client").setLevel(logging.ERROR)
65+
logging.getLogger("distributed.scheduler").setLevel(logging.ERROR)
66+
logging.getLogger("distributed.worker").setLevel(logging.ERROR)
67+
logging.getLogger("distributed.nanny").setLevel(logging.ERROR)
68+
# Also suppress bokeh and tornado logs that Dask uses
69+
logging.getLogger("bokeh").setLevel(logging.ERROR)
70+
logging.getLogger("tornado").setLevel(logging.ERROR)
71+
72+
# TODO: Add customization for dask client
5573
_dask_client = setup_dask_client(n_workers=4, memory_limit=0.25)
5674

5775
if check_interpolation:

bluemath_tk/teslakit/climate_emulator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1175,7 +1175,7 @@ def GenerateWaves(
11751175
# solve normal inverse CDF for each active chromosome
11761176
ipbs = 0 # prob_sim aux. index
11771177
sim_row = np.zeros(srl)
1178-
for i_c in np.where(crm == 1)[0]:
1178+
for i_c in np.where(np.atleast_1d(crm) == 1)[0]:
11791179
# random sampled GEV
11801180
rd = np.random.randint(0, len(xds_GEV_Par_Sampled.simulation))
11811181
xds_GEV_Par = xds_GEV_Par_Sampled.isel(simulation=rd)

bluemath_tk/waves/binwaves.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,10 @@ def reconstruc_spectra(
141141
The memory limit to use. Default is 0.5.
142142
chunk_sizes : dict, optional
143143
The chunk sizes to use. Default is {"time": 24}.
144+
verbose : bool, optional
145+
Whether to print verbose output. Default is False.
146+
If False, Dask logs are suppressed.
147+
If True, Dask logs are shown.
144148
145149
Returns
146150
-------

0 commit comments

Comments
 (0)