From c8fed0928e983c1f598e8563fe79f04e67d2f085 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 3 Apr 2026 05:07:41 +0000 Subject: [PATCH 01/10] Add log_and_raise helper and convert duplicate fre_logger.error/raise patterns Agent-Logs-Url: https://github.com/NOAA-GFDL/fre-cli/sessions/98852929-27ab-48b1-8b84-1787a26e72f3 Co-authored-by: ilaflott <6273252+ilaflott@users.noreply.github.com> --- fre/__init__.py | 19 +++++++ .../generate_time_averages/cdoTimeAverager.py | 4 +- .../generate_time_averages.py | 5 +- fre/cmor/cmor_helpers.py | 24 ++++---- fre/cmor/cmor_mixer.py | 6 +- fre/pp/configure_script_yaml.py | 8 +-- fre/pp/split_netcdf_script.py | 7 +-- fre/tests/test_fre_log_and_raise.py | 56 +++++++++++++++++++ 8 files changed, 103 insertions(+), 26 deletions(-) create mode 100644 fre/tests/test_fre_log_and_raise.py diff --git a/fre/__init__.py b/fre/__init__.py index 3a3702c89..50147a4b9 100644 --- a/fre/__init__.py +++ b/fre/__init__.py @@ -14,3 +14,22 @@ format = FORMAT, filename = None, encoding = 'utf-8' ) + + +def log_and_raise(msg, exc_type=ValueError, exc=None): + """ + Log an error message via fre_logger and raise an exception with the same message. + Avoids the need to duplicate error text in both fre_logger.error() and raise calls. + + :param msg: Error message to log and include in the exception. + :type msg: str + :param exc_type: Exception class to raise. Defaults to ValueError. + :type exc_type: type + :param exc: Optional original exception to chain from (uses ``raise ... from exc``). + :type exc: Exception, optional + :raises exc_type: Always raised with the given message. 
+ """ + fre_logger.error(msg, stacklevel=2) + if exc is not None: + raise exc_type(msg) from exc + raise exc_type(msg) diff --git a/fre/app/generate_time_averages/cdoTimeAverager.py b/fre/app/generate_time_averages/cdoTimeAverager.py index c8f585887..125fbf1f3 100644 --- a/fre/app/generate_time_averages/cdoTimeAverager.py +++ b/fre/app/generate_time_averages/cdoTimeAverager.py @@ -8,6 +8,7 @@ import cdo from cdo import Cdo +from fre import log_and_raise from .timeAverager import timeAverager fre_logger = logging.getLogger(__name__) @@ -32,8 +33,7 @@ def generate_timavg(self, infile = None, outfile = None): """ if self.avg_type not in ['all', 'seas', 'month']: - fre_logger.error('requested unknown avg_type %s.', self.avg_type) - raise ValueError(f'requested unknown avg_type {self.avg_type}') + log_and_raise(f'requested unknown avg_type {self.avg_type}') if self.var is not None: fre_logger.warning('WARNING: variable specification not twr supported for cdo time averaging. ignoring!') diff --git a/fre/app/generate_time_averages/generate_time_averages.py b/fre/app/generate_time_averages/generate_time_averages.py index 2d0f0a874..470597741 100755 --- a/fre/app/generate_time_averages/generate_time_averages.py +++ b/fre/app/generate_time_averages/generate_time_averages.py @@ -11,6 +11,8 @@ from .frenctoolsTimeAverager import frenctoolsTimeAverager from .frepytoolsTimeAverager import frepytoolsTimeAverager +from fre import log_and_raise + fre_logger = logging.getLogger(__name__) def generate_time_average(infile: Union[str, List[str]] = None, @@ -98,8 +100,7 @@ def generate_time_average(infile: Union[str, List[str]] = None, exitstatus = myavger.generate_timavg( infile = infile, outfile = outfile) else: - fre_logger.error('averager is None, check generate_time_average in generate_time_averages.py!') - raise ValueError + log_and_raise('averager is None, check generate_time_average in generate_time_averages.py!') # remove the new merged file if we created it. 
if merged: diff --git a/fre/cmor/cmor_helpers.py b/fre/cmor/cmor_helpers.py index a7e6c9033..aa3f35d9a 100644 --- a/fre/cmor/cmor_helpers.py +++ b/fre/cmor/cmor_helpers.py @@ -56,6 +56,8 @@ from .cmor_constants import ( ARCHIVE_GOLD_DATA_DIR, CMIP7_GOLD_OCEAN_FILE_STUB, CMIP6_GOLD_OCEAN_FILE_STUB, INPUT_TO_MIP_VERT_DIM ) +from fre import log_and_raise + fre_logger = logging.getLogger(__name__) @@ -457,10 +459,9 @@ def update_grid_and_label( json_file_path: str, .. note:: The function logs before and after values, and overwrites the input file unless an output path is given. """ if None in [new_grid_label, new_grid, new_nom_res]: - fre_logger.error( + log_and_raise( 'grid/grid_label/nom_res updating requested for exp_config file, but one of them is None\n' 'bailing...!') - raise ValueError try: with open(json_file_path, "r", encoding="utf-8") as file: @@ -471,24 +472,24 @@ def update_grid_and_label( json_file_path: str, data["grid"] = new_grid fre_logger.info('Updated "grid": %s', data["grid"]) except KeyError as e: - fre_logger.error("Failed to update 'grid': %s", e) - raise KeyError("Error while updating 'grid'. Ensure the field exists and is modifiable.") from e + log_and_raise("Error while updating 'grid'. Ensure the field exists and is modifiable.", + KeyError, exc=e) try: fre_logger.info('Original "grid_label": %s', data["grid_label"]) data["grid_label"] = new_grid_label fre_logger.info('Updated "grid_label": %s', data["grid_label"]) except KeyError as e: - fre_logger.error("Failed to update 'grid_label': %s", e) - raise KeyError("Error while updating 'grid_label'. Ensure the field exists and is modifiable.") from e + log_and_raise("Error while updating 'grid_label'. 
Ensure the field exists and is modifiable.", + KeyError, exc=e) try: fre_logger.info('Original "nominal_resolution": %s', data["nominal_resolution"]) data["nominal_resolution"] = new_nom_res fre_logger.info('Updated "nominal_resolution": %s', data["nominal_resolution"]) except KeyError as e: - fre_logger.error("Failed to update 'nominal_resolution': %s", e) - raise KeyError("Error updating 'nominal_resolution'. Ensure the field exists and is modifiable.") from e + log_and_raise("Error updating 'nominal_resolution'. Ensure the field exists and is modifiable.", + KeyError, exc=e) output_file_path = output_file_path or json_file_path @@ -530,10 +531,9 @@ def update_calendar_type( json_file_path: str, .. note:: The function logs before and after values, and overwrites the input file unless an output path is given. """ if new_calendar_type is None: - fre_logger.error( + log_and_raise( 'calendar_type updating requested for exp_config file, but one of them is None\n' 'bailing...!') - raise ValueError try: with open(json_file_path, "r", encoding="utf-8") as file: @@ -544,8 +544,8 @@ def update_calendar_type( json_file_path: str, data["calendar"] = new_calendar_type fre_logger.info('Updated "calendar": %s', data["calendar"]) except KeyError as e: - fre_logger.error("Failed to update 'calendar': %s", e) - raise KeyError("Error while updating 'calendar'. Ensure the field exists and is modifiable.") from e + log_and_raise("Error while updating 'calendar'. 
Ensure the field exists and is modifiable.", + KeyError, exc=e) output_file_path = output_file_path or json_file_path diff --git a/fre/cmor/cmor_mixer.py b/fre/cmor/cmor_mixer.py index 43aa55d66..3e1513ff6 100755 --- a/fre/cmor/cmor_mixer.py +++ b/fre/cmor/cmor_mixer.py @@ -50,6 +50,8 @@ DEPTH_COORDS, CMOR_NC_FILE_ACTION, CMOR_VERBOSITY, CMOR_EXIT_CTL, CMOR_MK_SUBDIRS, CMOR_LOG ) +from fre import log_and_raise + fre_logger = logging.getLogger(__name__) def rewrite_netcdf_file_var( mip_var_cfgs: dict = None, @@ -523,8 +525,8 @@ def rewrite_netcdf_file_var( mip_var_cfgs: dict = None, lev_bnds = create_lev_bnds(bound_these=lev, with_these=ds['z_i']) fre_logger.info('created lev_bnds...') except Exception as exc: - fre_logger.error("the cmor module always requires vertical levels to have bounds.") - raise KeyError("CMOR requires the input data have vertical level boundaries (bnds)") from exc + log_and_raise("CMOR requires the input data have vertical level boundaries (bnds)", + KeyError, exc=exc) fre_logger.info('lev_bnds = \n%s', lev_bnds) cmor_z = cmor.axis('depth_coord', diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index a6fec203b..86538104d 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -13,6 +13,8 @@ import fre.yamltools.combine_yamls_script as cy +from fre import log_and_raise + fre_logger = logging.getLogger(__name__) ######VALIDATE##### @@ -127,8 +129,7 @@ def set_rose_suite(yamlfile: dict, rose_suite: metomi.rose.config.ConfigNode) -> pa_scripts = "" rd_scripts = "" if pp is None: - fre_logger.error("Missing 'postprocess' section!") - raise ValueError + log_and_raise("Missing 'postprocess' section!") for pp_key, pp_value in pp.items(): if pp_key == "settings" or pp_key == "switches": @@ -152,8 +153,7 @@ def set_rose_suite(yamlfile: dict, rose_suite: metomi.rose.config.ConfigNode) -> # If there is already a script defined for preanalysis, fail # More than 1 script is not supported yet if 
pa_scripts: - fre_logger.error("Using more than 1 pre-analysis script is not supported") - raise ValueError + log_and_raise("Using more than 1 pre-analysis script is not supported") pa_scripts += f"{script} " diff --git a/fre/pp/split_netcdf_script.py b/fre/pp/split_netcdf_script.py index ed08688d6..fed7db58e 100644 --- a/fre/pp/split_netcdf_script.py +++ b/fre/pp/split_netcdf_script.py @@ -19,6 +19,7 @@ import xarray as xr import yaml +from fre import log_and_raise from fre.app.helpers import get_variables @@ -66,8 +67,7 @@ def split_netcdf(inputDir, outputDir, component, history_source, use_subdirs, #Verify input/output dirs exist and are dirs if not os.path.isdir(inputDir): - fre_logger.error(f"error: input dir {inputDir} does not exist or is not a directory") - raise OSError(f"error: input dir {inputDir} does not exist or is not a directory") + log_and_raise(f"error: input dir {inputDir} does not exist or is not a directory", OSError) if not os.path.isdir(outputDir): if os.path.isfile(outputDir): fre_logger.error(f"error: output dir {outputDir} is a file. Please specify a directory.") @@ -180,8 +180,7 @@ def split_file_xarray(infile, outfiledir, var_list='all'): os.makedirs(outfiledir) if not os.path.isfile(infile): - fre_logger.error(f"error: input file {infile} not found. Please check the path.") - raise OSError(f"error: input file {infile} not found. Please check the path.") + log_and_raise(f"error: input file {infile} not found. Please check the path.", OSError) dataset = xr.load_dataset(infile, decode_cf=False, decode_times=False, decode_coords="all") allvars = dataset.data_vars.keys() diff --git a/fre/tests/test_fre_log_and_raise.py b/fre/tests/test_fre_log_and_raise.py new file mode 100644 index 000000000..a08728bc1 --- /dev/null +++ b/fre/tests/test_fre_log_and_raise.py @@ -0,0 +1,56 @@ +""" +Tests for the log_and_raise helper function in fre/__init__.py. 
+ +Validates that log_and_raise: +- logs an error message via fre_logger +- raises the correct exception type with the expected message +- supports exception chaining via the exc parameter +- defaults to ValueError when no exc_type is given +- reports the correct caller in log output (not log_and_raise itself) +""" + +import logging + +import pytest + +from fre import log_and_raise + + +def test_log_and_raise_default_valueerror(caplog): + ''' log_and_raise with default exc_type should raise ValueError and log an error ''' + with caplog.at_level(logging.ERROR): + with pytest.raises(ValueError, match="something went wrong"): + log_and_raise("something went wrong") + assert "something went wrong" in caplog.text + + +def test_log_and_raise_custom_exception_type(caplog): + ''' log_and_raise with custom exc_type should raise that type ''' + with caplog.at_level(logging.ERROR): + with pytest.raises(OSError, match="file not found"): + log_and_raise("file not found", OSError) + assert "file not found" in caplog.text + + +def test_log_and_raise_with_chained_exception(caplog): + ''' log_and_raise with exc should chain exceptions via from ''' + original = KeyError("original key error") + with caplog.at_level(logging.ERROR): + with pytest.raises(KeyError, match="wrapped error") as exc_info: + log_and_raise("wrapped error", KeyError, exc=original) + assert exc_info.value.__cause__ is original + assert "wrapped error" in caplog.text + + +def test_log_and_raise_caller_in_log(caplog): + ''' log output should reference the calling function, not log_and_raise ''' + with caplog.at_level(logging.ERROR): + with pytest.raises(ValueError): + log_and_raise("test caller context") + # the log record funcName should be this test function, not 'log_and_raise' + for record in caplog.records: + if "test caller context" in record.message: + assert record.funcName == "test_log_and_raise_caller_in_log" + break + else: + pytest.fail("expected log record not found") From 
7bc513d6fcac0812d9c7a587b52170387a0f18a1 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 3 Apr 2026 05:09:38 +0000 Subject: [PATCH 02/10] Address code review: add usage examples to log_and_raise docstring Agent-Logs-Url: https://github.com/NOAA-GFDL/fre-cli/sessions/98852929-27ab-48b1-8b84-1787a26e72f3 Co-authored-by: ilaflott <6273252+ilaflott@users.noreply.github.com> --- fre/__init__.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/fre/__init__.py b/fre/__init__.py index 50147a4b9..9b568a244 100644 --- a/fre/__init__.py +++ b/fre/__init__.py @@ -28,6 +28,18 @@ def log_and_raise(msg, exc_type=ValueError, exc=None): :param exc: Optional original exception to chain from (uses ``raise ... from exc``). :type exc: Exception, optional :raises exc_type: Always raised with the given message. + + Examples:: + + # raises ValueError (default) and logs the message at ERROR level + log_and_raise("something went wrong") + + # raises a specific exception type + log_and_raise("file not found", OSError) + + # chains from an original exception (raise ... from exc) + except KeyError as e: + log_and_raise("update failed", KeyError, exc=e) """ fre_logger.error(msg, stacklevel=2) if exc is not None: From c3663414fa4ab442e0cf6b2b792368aa7b693352 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 3 Apr 2026 05:47:08 +0000 Subject: [PATCH 03/10] Convert raise statements to log_and_raise across 33 files Replace bare raise ExcType(msg) statements with log_and_raise(msg, ExcType) calls, which logs the error before raising. This ensures all raised exceptions are consistently logged via fre_logger.error. 
Changes: - Add 'from fre import log_and_raise' imports where missing - Convert raise ValueError/TypeError/FileNotFoundError/FileExistsError/ IOError/RuntimeError/OSError/Exception/NotImplementedError/KeyError/ NameError to log_and_raise equivalents - For 'raise ExcType(msg) from exc' patterns, use log_and_raise(msg, ExcType, exc=exc) - For patterns where fre_logger.error + raise had the same message, collapse into single log_and_raise call - For patterns where fre_logger.error + raise had different messages, keep fre_logger.error and replace raise with log_and_raise - Preserve all bare re-raises (raise with no arguments) - Preserve NotImplementedError in abstract methods in timeAverager.py - Collapse multi-line raise statements into single log_and_raise calls - Convert multi-argument ValueError to single string message log_and_raise calls Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> Co-authored-by: ilaflott <6273252+ilaflott@users.noreply.github.com> --- fre/app/generate_time_averages/combine.py | 12 ++++----- .../frenctoolsTimeAverager.py | 11 ++++---- .../generate_time_averages.py | 4 +-- fre/app/generate_time_averages/wrapper.py | 9 ++++--- fre/app/helpers.py | 6 +++-- .../mask_atmos_plevel/mask_atmos_plevel.py | 8 +++--- fre/app/regrid_xy/regrid_xy.py | 14 +++++------ .../remap_pp_components.py | 25 +++++++++---------- fre/check/frecheck.py | 3 ++- fre/cmor/cmor_helpers.py | 4 +-- fre/cmor/cmor_mixer.py | 5 ++-- fre/cmor/cmor_yamler.py | 11 ++++---- fre/make/create_checkout_script.py | 10 +++++--- fre/make/create_compile_script.py | 3 ++- fre/make/create_docker_script.py | 3 ++- fre/make/create_makefile_script.py | 3 ++- fre/make/gfdlfremake/platformfre.py | 21 ++++++++-------- fre/make/gfdlfremake/targetfre.py | 8 +++--- fre/make/gfdlfremake/yamlfre.py | 5 ++-- fre/make/make_helpers.py | 4 ++- fre/make/run_fremake_script.py | 3 ++- fre/pp/checkout_script.py | 9 ++++--- fre/pp/histval_script.py | 7 +++--- fre/pp/install_script.py | 3 ++- 
fre/pp/nccheck_script.py | 9 +++---- fre/pp/ppval_script.py | 11 ++++---- fre/pp/rename_split_script.py | 20 ++++++++------- fre/pp/run_script.py | 5 ++-- fre/pp/split_netcdf_script.py | 19 ++++++-------- fre/pp/status_script.py | 5 ++-- fre/pp/trigger_script.py | 3 ++- fre/pp/validate_script.py | 7 +++--- fre/run/frerun.py | 3 ++- 33 files changed, 146 insertions(+), 127 deletions(-) diff --git a/fre/app/generate_time_averages/combine.py b/fre/app/generate_time_averages/combine.py index db8ffbc92..c96c34812 100644 --- a/fre/app/generate_time_averages/combine.py +++ b/fre/app/generate_time_averages/combine.py @@ -11,6 +11,7 @@ import xarray as xr from ..helpers import change_directory +from fre import log_and_raise fre_logger = logging.getLogger(__name__) duration_parser = metomi.isodatetime.parsers.DurationParser() @@ -35,8 +36,7 @@ def form_bronx_directory_name(frequency: str, elif frequency == "yr": frequency_label = "annual" else: - raise ValueError(f"Frequency '{frequency}' not recognized or supported") - interval_object = duration_parser.parse(interval) + log_and_raise(f"Frequency '{frequency}' not recognized or supported") return frequency_label + '_' + str(interval_object.years) + 'yr' @@ -57,9 +57,9 @@ def merge_netcdfs(input_file_glob: str, output_file: str) -> None: if len(input_files) >= 1: fre_logger.debug(f"Input file search string '{input_file_glob}' matched {len(input_files)} files") else: - raise FileNotFoundError(f"'{input_file_glob}' resolves to no files") + log_and_raise(f"'{input_file_glob}' resolves to no files", FileNotFoundError) if Path(output_file).exists(): - raise FileExistsError(f"Output file '{output_file}' already exists") + log_and_raise(f"Output file '{output_file}' already exists", FileExistsError) ds = xr.open_mfdataset(input_files, compat='override', coords='minimal') ds.to_netcdf(output_file, unlimited_dims=['time']) @@ -93,9 +93,7 @@ def combine( root_in_dir: str, :rtype: None """ if frequency not in ["yr", "mon"]: - raise 
ValueError(f"Frequency '{frequency}' not recognized or supported") - - if frequency == "yr": + log_and_raise(f"Frequency '{frequency}' not recognized or supported") frequency_iso = "P1Y" elif frequency == "mon": frequency_iso = "P1M" diff --git a/fre/app/generate_time_averages/frenctoolsTimeAverager.py b/fre/app/generate_time_averages/frenctoolsTimeAverager.py index 59ebf95ce..00b05cc36 100644 --- a/fre/app/generate_time_averages/frenctoolsTimeAverager.py +++ b/fre/app/generate_time_averages/frenctoolsTimeAverager.py @@ -7,6 +7,7 @@ from cdo import Cdo from .timeAverager import timeAverager +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -35,7 +36,7 @@ def generate_timavg(self, infile=None, outfile=None): """ exitstatus=1 if self.avg_type not in ['month','all']: - raise ValueError(f'avg_type= {self.avg_type} not supported by this class at this time.') + log_and_raise(f'avg_type= {self.avg_type} not supported by this class at this time.') if self.unwgt: fre_logger.warning('unwgt=True unsupported by frenctoolsAverager. Ignoring!!!') @@ -45,7 +46,7 @@ def generate_timavg(self, infile=None, outfile=None): ' not currently supported for frenctols time averaging. ignoring!', self.var) if infile is None: - raise ValueError('Need an input file, specify a value for the infile argument') + log_and_raise('Need an input file, specify a value for the infile argument') if outfile is None: outfile='frenctoolsTimeAverage_'+infile @@ -53,7 +54,7 @@ def generate_timavg(self, infile=None, outfile=None): #check for existence of timavg.csh. If not found, issue might be that user is not in env with frenctools. 
if shutil.which('timavg.csh') is None: - raise ValueError('did not find timavg.csh') + log_and_raise('did not find timavg.csh') fre_logger.info('timeaverager using: %s', shutil.which("timavg.csh")) @@ -106,7 +107,7 @@ def generate_timavg(self, infile=None, outfile=None): if subp.returncode != 0: fre_logger.error('stderror = %s', stderror) - raise ValueError(f'error: timavg.csh had a problem, subp.returncode = {subp.returncode}') + log_and_raise(f'error: timavg.csh had a problem, subp.returncode = {subp.returncode}') fre_logger.info('%s climatology successfully ran',nc_monthly_file) exitstatus=0 @@ -130,7 +131,7 @@ def generate_timavg(self, infile=None, outfile=None): if subp.returncode != 0: fre_logger.error('stderror = %s', stderror) - raise ValueError(f'error: timavgcsh command not properly executed, subp.returncode = {subp.returncode}') + log_and_raise(f'error: timavgcsh command not properly executed, subp.returncode = {subp.returncode}') fre_logger.info('climatology successfully ran') exitstatus = 0 diff --git a/fre/app/generate_time_averages/generate_time_averages.py b/fre/app/generate_time_averages/generate_time_averages.py index 470597741..196fb0a47 100755 --- a/fre/app/generate_time_averages/generate_time_averages.py +++ b/fre/app/generate_time_averages/generate_time_averages.py @@ -42,9 +42,9 @@ def generate_time_average(infile: Union[str, List[str]] = None, start_time = time.perf_counter() fre_logger.debug('called generate_time_average') if None in [infile, outfile, pkg]: - raise ValueError('infile, outfile, and pkg are required inputs') + log_and_raise('infile, outfile, and pkg are required inputs') if pkg not in ['cdo', 'fre-nctools', 'fre-python-tools']: - raise ValueError(f'argument pkg = {pkg} not known, must be one of: cdo, fre-nctools, fre-python-tools') + log_and_raise(f'argument pkg = {pkg} not known, must be one of: cdo, fre-nctools, fre-python-tools') exitstatus = 1 myavger = None diff --git a/fre/app/generate_time_averages/wrapper.py 
b/fre/app/generate_time_averages/wrapper.py index 661363327..2609be7e5 100644 --- a/fre/app/generate_time_averages/wrapper.py +++ b/fre/app/generate_time_averages/wrapper.py @@ -8,6 +8,7 @@ from metomi.isodatetime.dumpers import TimePointDumper from . import generate_time_averages +from fre import log_and_raise fre_logger = logging.getLogger(__name__) one_year = DurationParser().parse('P1Y') @@ -77,7 +78,7 @@ def generate_wrapper(cycle_point: str, input_interval = DurationParser().parse(input_interval) if frequency not in ["yr", "mon"]: - raise ValueError(f"Frequency '{frequency}' not recognized or supported") + log_and_raise(f"Frequency '{frequency}' not recognized or supported") # convert frequency 'yr' or 'mon' to ISO8601 if frequency == 'mon': @@ -109,7 +110,7 @@ def generate_wrapper(cycle_point: str, fre_logger.debug("Annual ts to annual climo from source %s:%s variables", source, len(variables)) else: - raise FileNotFoundError(f"Expected files not found in {subdir_yr}") + log_and_raise(f"Expected files not found in {subdir_yr}", FileNotFoundError) elif subdir_mon.exists(): results = glob.glob(str(subdir_mon / f"{source}.{yyyy}01-{zzzz}12.*.nc")) if results: @@ -118,7 +119,7 @@ def generate_wrapper(cycle_point: str, fre_logger.debug("monthly ts to annual climo from source %s:%s variables", source, len(variables)) else: - raise FileNotFoundError(f"Expected files not found in {subdir_mon}") + log_and_raise(f"Expected files not found in {subdir_mon}", FileNotFoundError) else: fre_logger.debug('Skipping %s as it does not appear to be monthly or annual frequency', source) fre_logger.debug('neither %s nor %s exists', subdir_mon, subdir_yr) @@ -132,7 +133,7 @@ def generate_wrapper(cycle_point: str, fre_logger.debug("monthly ts to monthly climo from source %s:%s variables", source, len(variables)) else: - raise FileNotFoundError(f"Expected files not found in {subdir}") + log_and_raise(f"Expected files not found in {subdir}", FileNotFoundError) else: 
fre_logger.debug("Skipping %s as it does not appear to be monthly frequency", source) fre_logger.debug(" %s does not exist", subdir) diff --git a/fre/app/helpers.py b/fre/app/helpers.py index 5548e3619..5f878d685 100644 --- a/fre/app/helpers.py +++ b/fre/app/helpers.py @@ -6,6 +6,8 @@ import yaml +from fre import log_and_raise + fre_logger = logging.getLogger(__name__) @@ -25,7 +27,7 @@ def get_variables(yml: dict, pp_comp: str) -> dict: fre_logger.debug(f"Yaml file information: {yml}") fre_logger.debug(f"PP component: {pp_comp}") if not isinstance(yml, dict): - raise TypeError("yml should be of type dict, but was of type " + str(type(yml))) + log_and_raise("yml should be of type dict, but was of type " + str(type(yml)), TypeError) src_vars={} @@ -61,7 +63,7 @@ def get_variables(yml: dict, pp_comp: str) -> dict: # If the dictionary is empty (no overlap of pp components and components # in pp yaml) --> error if not src_vars: - raise ValueError(f"PP component, {pp_comp}, not found in pp yaml configuration!") + log_and_raise(f"PP component, {pp_comp}, not found in pp yaml configuration!") return src_vars diff --git a/fre/app/mask_atmos_plevel/mask_atmos_plevel.py b/fre/app/mask_atmos_plevel/mask_atmos_plevel.py index 65c3a7f79..9f47a0f91 100755 --- a/fre/app/mask_atmos_plevel/mask_atmos_plevel.py +++ b/fre/app/mask_atmos_plevel/mask_atmos_plevel.py @@ -8,6 +8,8 @@ import xarray as xr +from fre import log_and_raise + fre_logger = logging.getLogger(__name__) @@ -42,7 +44,7 @@ def mask_atmos_plevel_subtool(infile: str = None, """ # check if input file exists, raise an error if not if not os.path.exists(infile): - raise FileNotFoundError(f"ERROR: Input file {infile} does not exist") + log_and_raise(f"ERROR: Input file {infile} does not exist", FileNotFoundError) # Warn if outfile exists, but continue and recreate if os.path.exists(outfile): @@ -62,7 +64,7 @@ def mask_atmos_plevel_subtool(infile: str = None, if "ps" not in list(ds_ps.variables): 
fre_logger.warning('pressure variable ps not found in target pressure file') if not warn_no_ps: - raise ValueError(f"Surface pressure file {psfile} does not contain surface pressure.") + log_and_raise(f"Surface pressure file {psfile} does not contain surface pressure.") fre_logger.warning('warn_no_ps is True! this means I\'m going to no-op gracefully instead of raising an error') return @@ -143,7 +145,7 @@ def mask_field_above_surface_pressure(ds: xr.Dataset, try: missing_value = ds[var].encoding['missing_value'] except Exception as exc: - raise KeyError("input file does not contain missing_value, a required variable attribute") from exc + log_and_raise("input file does not contain missing_value, a required variable attribute", KeyError, exc=exc) fre_logger.info('masking do not need looping') masked = xr.where(plev_extended > ps_extended, missing_value, ds[var]) diff --git a/fre/app/regrid_xy/regrid_xy.py b/fre/app/regrid_xy/regrid_xy.py index f70d6e472..3f2533061 100644 --- a/fre/app/regrid_xy/regrid_xy.py +++ b/fre/app/regrid_xy/regrid_xy.py @@ -7,6 +7,7 @@ import yaml from fre.app import helpers +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -83,7 +84,7 @@ def get_grid_spec(datadict: dict) -> str: elif Path("grid_spec.nc").exists(): grid_spec = "grid_spec.nc" else: - raise IOError(f"Cannot find mosaic.nc or grid_spec.nc in tar file {pp_grid_spec_tar}") + log_and_raise(f"Cannot find mosaic.nc or grid_spec.nc in tar file {pp_grid_spec_tar}", IOError) fre_logger.debug(f"Current directory: {Path.cwd()}") @@ -122,7 +123,7 @@ def get_input_mosaic(datadict: dict) -> str: #check if the mosaic file exists in the current directory if not Path(mosaic_file).exists(): - raise IOError(f"Cannot find mosaic file {mosaic_file} in current work directory {work_dir}") + log_and_raise(f"Cannot find mosaic file {mosaic_file} in current work directory {work_dir}", IOError) return mosaic_file @@ -299,16 +300,15 @@ def regrid_xy(yamlfile: str, #check if 
input_dir exists if not Path(input_dir).exists(): - raise RuntimeError(f"Input directory {input_dir} containing the input data files does not exist") + log_and_raise(f"Input directory {input_dir} containing the input data files does not exist", RuntimeError) #check if output_dir exists if not Path(output_dir).exists(): - raise RuntimeError(f"Output directory {output_dir} where regridded data" \ - "will be outputted does not exist") + log_and_raise(f"Output directory {output_dir} where regridded data will be outputted does not exist", RuntimeError) #check if work_dir exists if not Path(work_dir).exists(): - raise RuntimeError(f"Specified working directory {work_dir} does not exist") + log_and_raise(f"Specified working directory {work_dir} does not exist", RuntimeError) #work in working directory with helpers.change_directory(work_dir): @@ -399,4 +399,4 @@ def regrid_xy(yamlfile: str, if fregrid_job.returncode == 0: fre_logger.info(fregrid_job.stdout.split("\n")[-3:]) else: - raise RuntimeError(fregrid_job.stderr) + log_and_raise(fregrid_job.stderr, RuntimeError) diff --git a/fre/app/remap_pp_components/remap_pp_components.py b/fre/app/remap_pp_components/remap_pp_components.py index 5bcdc924c..2a74fc93d 100755 --- a/fre/app/remap_pp_components/remap_pp_components.py +++ b/fre/app/remap_pp_components/remap_pp_components.py @@ -11,6 +11,7 @@ from typing import List import yaml from fre.app import helpers +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -32,13 +33,13 @@ def verify_dirs(in_dir: str, out_dir: str): if Path(in_dir).is_dir(): fre_logger.info("Input directory is a valid directory") else: - raise ValueError(f"Error: Input directory {in_dir} does not exist or is not a valid directory") + log_and_raise(f"Error: Input directory {in_dir} does not exist or is not a valid directory") # Verify output directory exists and is a directory if Path(out_dir).is_dir(): fre_logger.info("Output directory is a valid directory") else: - raise 
ValueError(f"Error: Output directory {out_dir} does not exist or is not a valid directory") + log_and_raise(f"Error: Output directory {out_dir} does not exist or is not a valid directory") def create_dir(out_dir: str, comp: str, freq: str, chunk:str, ens:str, dir_ts: bool) -> str: """ @@ -115,7 +116,7 @@ def freq_to_legacy(iso_dura: str) -> str: elif iso_dura in ['PT30M', 'PT0.5H']: freq_legacy = '30min' else: - raise ValueError(f"Could not convert ISO duration '{iso_dura}'") + log_and_raise(f"Could not convert ISO duration '{iso_dura}'") return freq_legacy @@ -161,7 +162,7 @@ def freq_to_date_format(iso_freq: str) -> str: elif (iso_freq[:2]=='PT') and (iso_freq[-1:]=='H'): return 'CCYYMMDDThh' else: - raise ValueError(f'ERROR: Unknown Frequency {iso_freq}') + log_and_raise(f'ERROR: Unknown Frequency {iso_freq}') def truncate_date(date: str, freq: str) -> str: """ @@ -231,7 +232,7 @@ def search_files(product: str, var: list, source: str, freq: str, fre_logger.info("var: %s", v) f = glob.glob(f"{source}.{v}*.nc") if not f: #if glob returns empty list - raise ValueError("Variable {v} could not be found or does not exist.") + log_and_raise("Variable {v} could not be found or does not exist.") files.extend(f) else: if product == "ts": @@ -240,7 +241,7 @@ def search_files(product: str, var: list, source: str, freq: str, elif product == "av": date = truncate_date(begin, "P1Y") else: - raise ValueError("Product not set to ts or av.") + log_and_raise("Product not set to ts or av.") if var == "all": f = glob.glob(f"{source}.{date}-*.*.nc") @@ -250,7 +251,7 @@ def search_files(product: str, var: list, source: str, freq: str, fre_logger.info("var: %s", v) f = glob.glob(f"{source}.{date}-*.{v}*.nc") if not f: #if glob returns empty list - raise ValueError("Variable {v} could not be found or does not exist.") + log_and_raise("Variable {v} could not be found or does not exist.") files.extend(f) if product == "av" and current_chunk == "P1Y": f = glob.glob(f"{source}.{date}.*.nc") 
@@ -276,7 +277,7 @@ def get_varlist(comp_info: dict, product: str, req_source: str, src_vars: dict) """ if product == "static": if comp_info.get("static") is None: - raise ValueError( + log_and_raise( f"Product is set to static but no static sources/variables defined for " f"{comp_info.get('type')}" ) @@ -542,11 +543,9 @@ def remap_pp_components(input_dir: str, output_dir: str, begin_date: str, curren if not files: if ens_mem is not None: - raise ValueError("\nError: No input files found in", - f"{input_dir}/{g}/{ens_mem}/{s}/{f}/{c}") + log_and_raise(f"\nError: No input files found in {input_dir}/{g}/{ens_mem}/{s}/{f}/{c}") else: - raise ValueError("\nError: No input files found in", - f"{input_dir}/{g}/{s}/{f}/{c}") + log_and_raise(f"\nError: No input files found in {input_dir}/{g}/{s}/{f}/{c}") os.chdir(output_dir) @@ -592,7 +591,7 @@ def remap_pp_components(input_dir: str, output_dir: str, begin_date: str, curren if offline_srcs is not None: for src_file in offline_srcs: if not Path(src_file).exists(): - raise ValueError("Offline diagnostic file defined but " + log_and_raise("Offline diagnostic file defined but " f"{src_file} does not exist or cannot " "be found!") diff --git a/fre/check/frecheck.py b/fre/check/frecheck.py index 4c1cf4f4d..1fe0fb2ae 100644 --- a/fre/check/frecheck.py +++ b/fre/check/frecheck.py @@ -3,6 +3,7 @@ import click from .frecheckexample import check_test_function +from fre import log_and_raise @click.group(help=click.style(" - check subcommands !!!NotImplemented!!!", fg=(162,91,232))) def check_cli(): @@ -13,4 +14,4 @@ def check_cli(): def function(uppercase): """ - Execute fre check test """ check_test_function(uppercase) - raise NotImplementedError('fre check has not been implemented yet!') + log_and_raise('fre check has not been implemented yet!', NotImplementedError) diff --git a/fre/cmor/cmor_helpers.py b/fre/cmor/cmor_helpers.py index aa3f35d9a..f60425eb3 100644 --- a/fre/cmor/cmor_helpers.py +++ b/fre/cmor/cmor_helpers.py @@ 
-773,12 +773,12 @@ def filter_brands( brands: list, if len(filtered_brands) == 0: fre_logger.error('cmip7 brand disambiguation eliminated all candidates ' 'from %s', brands) - raise ValueError( + log_and_raise( f'multiple brands {brands} found for {target_var}, ' f'but none survived disambiguation filtering') fre_logger.error('cmip7 brand disambiguation could not resolve between ' '%s', filtered_brands) - raise ValueError( + log_and_raise( f'multiple brands {filtered_brands} remain for {target_var} after ' f'disambiguation \u2014 cannot determine which brand to use') diff --git a/fre/cmor/cmor_mixer.py b/fre/cmor/cmor_mixer.py index 3e1513ff6..ae5d0611f 100755 --- a/fre/cmor/cmor_mixer.py +++ b/fre/cmor/cmor_mixer.py @@ -146,9 +146,8 @@ def rewrite_netcdf_file_var( mip_var_cfgs: dict = None, input_vert_dim = get_vertical_dimension(ds, target_var) ) else: - fre_logger.error('cmip7 case detected, but dimensions of input data do not match ' - 'any of those found for the associated brands.') - raise ValueError + log_and_raise('cmip7 case detected, but dimensions of input data do not match ' + 'any of those found for the associated brands.', ValueError) else: fre_logger.debug('non-cmip7 case detected, skipping variable brands') diff --git a/fre/cmor/cmor_yamler.py b/fre/cmor/cmor_yamler.py index f1569a7bb..a4549dece 100644 --- a/fre/cmor/cmor_yamler.py +++ b/fre/cmor/cmor_yamler.py @@ -24,6 +24,7 @@ from .cmor_mixer import cmor_run_subtool from .cmor_helpers import ( check_path_existence, iso_to_bronx_chunk, #conv_mip_to_bronx_freq, get_bronx_freq_from_mip_table ) +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -123,8 +124,8 @@ def cmor_yaml_subtool( yamlfile: str = None, fre_logger.info('attempt to create it...') Path(cmorized_outdir).mkdir(exist_ok=False, parents=True) except Exception as exc: #uncovered - raise OSError( - f'could not create cmorized_outdir = {cmorized_outdir} for some reason!') from exc + log_and_raise( + f'could not create 
cmorized_outdir = {cmorized_outdir} for some reason!', OSError, exc=exc) # path to input user/experiment configuration, expected by CMOR json_exp_config = os.path.expandvars( @@ -179,7 +180,7 @@ def cmor_yaml_subtool( yamlfile: str = None, if freq is None: if mip_era == 'CMIP7': # CMIP7 tables do not carry frequency — the user must always specify it - raise ValueError( + log_and_raise( f'freq is required for CMIP7 but was not specified for table target {table_name}.\n' f' CMIP7 MIP tables do not contain frequency metadata.\n' f' please set freq explicitly (e.g. "monthly", "daily") in the cmor yaml.') @@ -191,7 +192,7 @@ def cmor_yaml_subtool( yamlfile: str = None, except (KeyError, TypeError): freq = None if freq is None: - raise ValueError( + log_and_raise( f'not enough frequency information to process variables for {table_name}.\n' f' freq was not specified in the cmor yaml, and could not be derived from the MIP table.\n' f' please set freq explicitly (e.g. "monthly", "daily") in the cmor yaml.') @@ -210,7 +211,7 @@ def cmor_yaml_subtool( yamlfile: str = None, grid_desc = gridding_dict['grid_desc'] nom_res = gridding_dict['nom_res'] if None in [grid_label, grid_desc, nom_res]: - raise ValueError('gridding dictionary, if present, must have all three fields be non-empty.') + log_and_raise('gridding dictionary, if present, must have all three fields be non-empty.') # gridding info of data ---- revisit table_components_list = cmor_yaml_table_target['target_components'] diff --git a/fre/make/create_checkout_script.py b/fre/make/create_checkout_script.py index e2595abea..02ccf6126 100644 --- a/fre/make/create_checkout_script.py +++ b/fre/make/create_checkout_script.py @@ -18,6 +18,7 @@ from typing import Optional import fre.yamltools.combine_yamls_script as cy from .gfdlfremake import varsfre, yamlfre, checkout, targetfre +from fre import log_and_raise # set up logging fre_logger = logging.getLogger(__name__) @@ -115,7 +116,7 @@ def checkout_create(yamlfile: str, 
platform: tuple, target: tuple, jobs_str = str(njobs) if isinstance(njobs, bool) and execute: - raise ValueError ('njobs must be defined as a number if --execute flag is True') + log_and_raise('njobs must be defined as a number if --execute flag is True') # Determine backgrounding syntax # parallel_cmd is the suffix added to shell commands @@ -151,7 +152,7 @@ def checkout_create(yamlfile: str, platform: tuple, target: tuple, for platform_name in platform: if not model_yaml.platforms.hasPlatform(platform_name): - raise ValueError(f"{platform_name} does not exist in platforms.yaml") + log_and_raise(f"{platform_name} does not exist in platforms.yaml") platform_info = model_yaml.platforms.getPlatformFromName(platform_name) @@ -180,8 +181,9 @@ def checkout_create(yamlfile: str, platform: tuple, target: tuple, try: subprocess.run(args=[checkout_sh_path], check=True) except Exception as exc: - raise OSError(f"\nError executing checkout script: {checkout_sh_path}.", - f"\nTry removing test folder: {platform_info['modelRoot']}\n") from exc + log_and_raise(f"\nError executing checkout script: {checkout_sh_path}." 
+ f"\nTry removing test folder: {platform_info['modelRoot']}\n", + OSError, exc=exc) else: return diff --git a/fre/make/create_compile_script.py b/fre/make/create_compile_script.py index b40cf228f..4a50964fd 100644 --- a/fre/make/create_compile_script.py +++ b/fre/make/create_compile_script.py @@ -31,6 +31,7 @@ yamlfre ) +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -101,7 +102,7 @@ def compile_create(yamlfile:str, platform:tuple[str], target:tuple[str], makejob for target_name in tlist: target = targetfre.fretarget(target_name) if not model_yaml.platforms.hasPlatform(platform_name): - raise ValueError(f"{platform_name} does not exist in platforms.yaml") + log_and_raise(f"{platform_name} does not exist in platforms.yaml") platform = model_yaml.platforms.getPlatformFromName(platform_name) ## Make the bld_dir based on the modelRoot, the platform, and the target diff --git a/fre/make/create_docker_script.py b/fre/make/create_docker_script.py index 84752b10d..70d670ec0 100644 --- a/fre/make/create_docker_script.py +++ b/fre/make/create_docker_script.py @@ -23,6 +23,7 @@ yamlfre ) +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -79,7 +80,7 @@ def dockerfile_create(yamlfile: str, platform: tuple[str], target: tuple[str], for targetName in tlist: targetObject = targetfre.fretarget(targetName) if not modelYaml.platforms.hasPlatform(platformName): - raise ValueError (f"{platformName} does not exist in platforms.yaml") + log_and_raise(f"{platformName} does not exist in platforms.yaml") platform = modelYaml.platforms.getPlatformFromName(platformName) diff --git a/fre/make/create_makefile_script.py b/fre/make/create_makefile_script.py index 896edb079..47bc96349 100644 --- a/fre/make/create_makefile_script.py +++ b/fre/make/create_makefile_script.py @@ -37,6 +37,7 @@ import fre.yamltools.combine_yamls_script as cy from fre.make.make_helpers import get_mktemplate_path from .gfdlfremake import makefilefre, varsfre, 
targetfre, yamlfre +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -92,7 +93,7 @@ def makefile_create(yamlfile: str, platform: tuple[str], target: tuple[str]): if model_yaml.platforms.hasPlatform(platform_name): pass else: - raise ValueError (f"{platform_name} does not exist in platforms.yaml") + log_and_raise(f"{platform_name} does not exist in platforms.yaml") platform=model_yaml.platforms.getPlatformFromName(platform_name) ## Make the bld_dir based on the modelRoot, the platform, and the target diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index 1d6b0f049..898404cdc 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -1,4 +1,5 @@ import yaml +from fre import log_and_raise class platforms (): def __init__(self,platforminfo): @@ -17,12 +18,12 @@ def __init__(self,platforminfo): try: p["name"] except: - raise Exception("At least one of the platforms is missing a name in "+fname+"\n") + log_and_raise("At least one of the platforms is missing a name in "+fname+"\n", Exception) ## Check the compiler try: p["compiler"] except: - raise Exception("You must specify a compiler in your "+p["name"]+" platform in the file "+fname+"\n") + log_and_raise("You must specify a compiler in your "+p["name"]+" platform in the file "+fname+"\n", Exception) ## Check for list of commands that include modules to initialize, load, and unload try: p["envSetup"] @@ -52,14 +53,14 @@ def __init__(self,platforminfo): try: p["containerBuild"] except: - raise Exception("Platform "+p["name"]+": You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the file "+fname+"\n") + log_and_raise("Platform "+p["name"]+": You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the file "+fname+"\n", Exception) if p["containerBuild"] != "podman" and p["containerBuild"] != "docker": - raise 
ValueError("Platform "+p["name"]+": Container builds only supported with docker or podman, but you listed "+p["containerBuild"]+"\n") + log_and_raise("Platform "+p["name"]+": Container builds only supported with docker or podman, but you listed "+p["containerBuild"]+"\n") ## Get the name of the base container try: p["containerBase"] except: - raise NameError("Platform "+p["name"]+": You must specify the base container you wish to use to build your application") + log_and_raise("Platform "+p["name"]+": You must specify the base container you wish to use to build your application", NameError) ## Check if this is a 2 step (multi stage) build try: p["container2step"] @@ -70,7 +71,7 @@ def __init__(self,platforminfo): try: p["container2base"] except: - raise NameError ("Platform "+p["name"]+": container2step is True, so you must define a container2base\n") + log_and_raise("Platform "+p["name"]+": container2step is True, so you must define a container2base\n", NameError) ## Check if there is anything special to copy over else: ## There should not be a second base if this is not a 2 step build @@ -79,7 +80,7 @@ def __init__(self,platforminfo): except: p["container2base"] = "" else: - raise ValueError ("Platform "+p["name"]+": You defined container2base "+p["container2base"]+" but container2step is False\n") + log_and_raise("Platform "+p["name"]+": You defined container2base "+p["container2base"]+" but container2step is False\n") ## Get any commands to execute in the dockerfile RUN command try: p["RUNenv"] @@ -94,9 +95,9 @@ def __init__(self,platforminfo): try: p["containerRun"] except: - raise Exception("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the file "+fname+"\n") + log_and_raise("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the file "+fname+"\n", Exception) if p["containerRun"] != "apptainer" and p["containerRun"] != "singularity": - raise 
ValueError("Container builds only supported with apptainer, but you listed "+p["containerRun"]+"\n") + log_and_raise("Container builds only supported with apptainer, but you listed "+p["containerRun"]+"\n") ## Get the path to where the output model container will be located try: @@ -108,7 +109,7 @@ def __init__(self,platforminfo): try: p["mkTemplate"] except: - raise ValueError("The platform "+p["name"]+" must specify a mkTemplate \n") + log_and_raise("The platform "+p["name"]+" must specify a mkTemplate \n") def hasPlatform(self,name): """ diff --git a/fre/make/gfdlfremake/targetfre.py b/fre/make/gfdlfremake/targetfre.py index e6fbfb1a6..4f5363228 100644 --- a/fre/make/gfdlfremake/targetfre.py +++ b/fre/make/gfdlfremake/targetfre.py @@ -1,3 +1,5 @@ +from fre import log_and_raise + class fretarget: """ Class: Stores information about the target @@ -52,19 +54,19 @@ def __init__(self,t): if self.debug: try: if self.repro or self.prod == True: - raise ValueError(errormsg) + log_and_raise(errormsg) except ValueError: raise elif self.repro: try: if self.prod == True: - raise ValueError(errormsg) + log_and_raise(errormsg) except ValueError: raise else: try: if self.prod == False: - raise ValueError("Your target '"+self.target+"' needs to include one of the following: prod, repro, debug") + log_and_raise("Your target '"+self.target+"' needs to include one of the following: prod, repro, debug") except ValueError: raise diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 50413c4d7..aca681deb 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -4,6 +4,7 @@ import yaml from jsonschema import validate, ValidationError, SchemaError from . import platformfre +from fre import log_and_raise def parseCompile(fname,v): """ @@ -32,12 +33,12 @@ def __init__(self,compileinfo): self.yaml = compileinfo # Check if self.yaml is None if self.yaml is None: - raise ValueError("The provided compileinfo is None. 
It must be a valid dictionary.") + log_and_raise("The provided compileinfo is None. It must be a valid dictionary.") ## Check for required experiment name try: self.yaml["experiment"] except KeyError: - raise KeyError("You must set an experiment name to compile \n") + log_and_raise("You must set an experiment name to compile \n", KeyError) ## Check for optional libraries and packages for linking in container try: self.yaml["container_addlibs"] diff --git a/fre/make/make_helpers.py b/fre/make/make_helpers.py index 243d66a39..c49d3c7ee 100644 --- a/fre/make/make_helpers.py +++ b/fre/make/make_helpers.py @@ -5,6 +5,8 @@ import logging from pathlib import Path +from fre import log_and_raise + def get_mktemplate_path(mk_template: str, container_flag: bool, model_root: str = None) -> str: """ @@ -39,7 +41,7 @@ def get_mktemplate_path(mk_template: str, container_flag: bool, model_root: str # Check in template path exists if not Path(template_path).exists(): - raise ValueError("Error w/ mkmf template. Created path from given " + log_and_raise("Error w/ mkmf template. 
Created path from given " f"filename: {template_path} does not exist.") else: if "/" not in mk_template: diff --git a/fre/make/run_fremake_script.py b/fre/make/run_fremake_script.py index ec506cbc2..a11ac852c 100644 --- a/fre/make/run_fremake_script.py +++ b/fre/make/run_fremake_script.py @@ -13,6 +13,7 @@ from fre.make.create_compile_script import compile_create from fre.make.create_docker_script import dockerfile_create from .gfdlfremake import (varsfre, yamlfre) +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -89,7 +90,7 @@ def fremake_run(yamlfile:str, platform:str, target:str, container_platforms = () for platform_name in plist: if not model_yaml.platforms.hasPlatform(platform_name): - raise ValueError (f"{platform_name} does not exist in platforms.yaml") + log_and_raise(f"{platform_name} does not exist in platforms.yaml") platform_info = model_yaml.platforms.getPlatformFromName(platform_name) diff --git a/fre/pp/checkout_script.py b/fre/pp/checkout_script.py index 1c7c90849..8262feec0 100644 --- a/fre/pp/checkout_script.py +++ b/fre/pp/checkout_script.py @@ -11,6 +11,7 @@ from . 
import make_workflow_name from ..fre import version as fre_ver +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -50,7 +51,7 @@ def checkout_template(experiment = None, platform = None, target = None, branch # check args + set the name of the directory if None in [experiment, platform, target]: os.chdir(go_back_here) - raise ValueError( 'one of these are None: experiment / platform / target = \n' + log_and_raise( 'one of these are None: experiment / platform / target = \n' f'{experiment} / {platform} / {target}' ) #name = f"{experiment}__{platform}__{target}" workflow_name = make_workflow_name(experiment, platform, target) @@ -60,8 +61,8 @@ def checkout_template(experiment = None, platform = None, target = None, branch try: os.makedirs(directory, exist_ok = True) except Exception as exc: - raise OSError( - f"(checkoutScript) directory {directory} wasn't able to be created. exit!") from exc + log_and_raise( + f"(checkoutScript) directory {directory} wasn't able to be created. exit!", OSError, exc=exc) finally: os.chdir(go_back_here) @@ -96,7 +97,7 @@ def checkout_template(experiment = None, platform = None, target = None, branch fre_logger.info( f"ERROR: current branch is '{current_branch}', current tag-describe is '{current_tag}'") os.chdir(go_back_here) - raise ValueError('neither tag nor branch matches the git clone branch arg') #exit(1) + log_and_raise('neither tag nor branch matches the git clone branch arg') # make sure we are back where we should be if os.getcwd() != go_back_here: diff --git a/fre/pp/histval_script.py b/fre/pp/histval_script.py index c00b16134..1e4d26078 100644 --- a/fre/pp/histval_script.py +++ b/fre/pp/histval_script.py @@ -8,6 +8,7 @@ import logging import yaml from . 
import nccheck_script as ncc +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -53,8 +54,8 @@ def validate(history: str, date_string: str, warn: bool): # Make sure we found atleast one diag_manifest if diag_count < 1: if not warn: - raise FileNotFoundError( - f" No diag_manifest files were found in {history}. History files cannot be validated.") + log_and_raise( + f" No diag_manifest files were found in {history}. History files cannot be validated.", FileNotFoundError) fre_logger.warning( f" Warning: No diag_manifest files were found in {history}. History files cannot be validated.") return 0 @@ -90,7 +91,7 @@ def validate(history: str, date_string: str, warn: bool): #Error Handling if len(mismatches)!=0: fre_logger.error("Unexpected number of timesteps found") - raise ValueError( + log_and_raise( "\n" + str(len(mismatches)) + " file(s) contain(s) an unexpected number of timesteps:\n" + "\n".join(mismatches)) diff --git a/fre/pp/install_script.py b/fre/pp/install_script.py index e6561697f..0be9164a0 100644 --- a/fre/pp/install_script.py +++ b/fre/pp/install_script.py @@ -6,6 +6,7 @@ from pathlib import Path from . 
import make_workflow_name +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -47,7 +48,7 @@ def install_subtool(experiment, platform, target): else: fre_logger.error(f"ERROR: Please remove installed workflow with 'cylc clean {workflow_name}'" " or move the workflow run directory '{install_dir}'") - raise Exception(f"ERROR: Workflow '{install_dir}' already installed, and the definition has changed!") + log_and_raise(f"ERROR: Workflow '{install_dir}' already installed, and the definition has changed!", Exception) else: fre_logger.info(f"NOTE: About to install workflow into ~/cylc-run/{workflow_name}") cmd = f"cylc install --no-run-name {workflow_name}" diff --git a/fre/pp/nccheck_script.py b/fre/pp/nccheck_script.py index c00703126..985e77754 100644 --- a/fre/pp/nccheck_script.py +++ b/fre/pp/nccheck_script.py @@ -2,6 +2,8 @@ import logging import netCDF4 +from fre import log_and_raise + fre_logger = logging.getLogger(__name__) @@ -37,12 +39,7 @@ def check(file_path: str, num_steps: int): return 0 else: - fre_logger.error( - f" Unexpected number of timesteps found in {file_path}. " - f"Found: {num_actual_steps} timesteps " - f"Expected: {num_steps} timesteps" - ) - raise ValueError( + log_and_raise( f" Unexpected number of timesteps found in {file_path}. " f"Found: {num_actual_steps} timesteps " f"Expected: {num_steps} timesteps" diff --git a/fre/pp/ppval_script.py b/fre/pp/ppval_script.py index 7481d446d..34750e26f 100644 --- a/fre/pp/ppval_script.py +++ b/fre/pp/ppval_script.py @@ -12,6 +12,7 @@ import netCDF4 from . 
import nccheck_script as ncc +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -121,7 +122,7 @@ def getenot(date_start: str, enot = (diff.days + 1) * 48 else: - raise ValueError(f"Unknown chunk_type '{chunk_type}'") + log_and_raise(f"Unknown chunk_type '{chunk_type}'") fre_logger.debug( f"date start: {date_start}; date end: {date_end}; " @@ -184,7 +185,7 @@ def validate(filepath: str): try: cftime.datetime(1,1,1, calendar = cal) except: - raise ValueError(f" Calendar name must follow cf convention for validation. {cal} is not a valid calendar.") + log_and_raise(f" Calendar name must follow cf convention for validation. {cal} is not a valid calendar.") # date_{end,start} will have a total of date_end.lastindex groups, # that are the year (date_end[1]), the month (date_end[2]), the @@ -233,7 +234,7 @@ def validate(filepath: str): # If none of the expected frequencies are found in filepath, raise ValueError if all(freq not in path_elements for freq in expected_frequencies): - raise ValueError( + log_and_raise( f" Cannot determine frequency from {filepath}. 
Sub-daily" " files must at minimum be placed in a directory" " corresponding to data frequency: '6hr, 'PT6H', '3hr," @@ -244,11 +245,11 @@ def validate(filepath: str): enot = getenot(date_start, date_end, '30minute', cal) else: - raise ValueError(f"Cannot determine frequency for date '{date_start}'") + log_and_raise(f"Cannot determine frequency for date '{date_start}'") try: ncc.check(filepath, enot) except: - raise ValueError(f"Timesteps found in {filepath} differ from expectation") + log_and_raise(f"Timesteps found in {filepath} differ from expectation") return 0 diff --git a/fre/pp/rename_split_script.py b/fre/pp/rename_split_script.py index d78a09525..cc92bf78b 100644 --- a/fre/pp/rename_split_script.py +++ b/fre/pp/rename_split_script.py @@ -9,6 +9,8 @@ import yaml from metomi.isodatetime.parsers import DurationParser, TimePointParser +from fre import log_and_raise + fre_logger = logging.getLogger(__name__) duration_parser = DurationParser() time_parser = TimePointParser(assumed_time_zone=(0, 0)) @@ -51,7 +53,7 @@ def get_freq_and_format_from_two_dates(date1: cftime.datetime, date2: cftime.dat iso_freq = f"PT{int(minutes)}M" format_ = '%Y%m%d%H%M' else: - raise ValueError(f"Cannot determine frequency and format from '{date1}' and '{date2}'") + log_and_raise(f"Cannot determine frequency and format from '{date1}' and '{date2}'") fre_logger.debug(f"Comparing '{date1}' and '{date2}': returning frequency '{iso_freq}' and format '{format_}'") return iso_freq, format_ @@ -82,7 +84,7 @@ def get_duration_from_two_dates(date1: cftime.datetime, date2: cftime.datetime) if years_frac < 0.04: duration = f"P{years_round}Y" else: - raise ValueError(f"Could not determine ISO8601 duration between '{date1}' and '{date2}'") + log_and_raise(f"Could not determine ISO8601 duration between '{date1}' and '{date2}'") fre_logger.debug(f"Comparing '{date1}' and '{date2}': returning duration '{duration}'") return duration @@ -121,7 +123,7 @@ def rename_file(input_file: str, diag_manifest: 
tuple[str, ...] | str | None = ( var = parts[2] tile = None else: - raise ValueError(f"File '{input_file}' cannot be parsed") + log_and_raise(f"File '{input_file}' cannot be parsed") # open the nc file ds = xr.open_dataset(input_file) @@ -210,19 +212,19 @@ def rename_file(input_file: str, diag_manifest: tuple[str, ...] | str | None = ( for manifest in manifests: manifest_path = Path(manifest) if not manifest_path.exists(): - raise FileNotFoundError(f"Diag manifest '{manifest}' does not exist") + log_and_raise(f"Diag manifest '{manifest}' does not exist", FileNotFoundError) fre_logger.info(f"Using diag manifest '{manifest}'") with open(manifest_path, 'r') as f: yaml_data = yaml.safe_load(f) for diag_file in yaml_data.get("diag_files", []): if diag_file.get("file_name") == label: if found_entry is not None: - raise Exception(f"Diag file '{label}' found in multiple manifests ('{found_manifest}' and '{manifest}')") + log_and_raise(f"Diag file '{label}' found in multiple manifests ('{found_manifest}' and '{manifest}')", Exception) found_entry = diag_file found_manifest = manifest if found_entry is None: - raise Exception(f"File '{label}' not found in diag manifests") + log_and_raise(f"File '{label}' not found in diag manifests", Exception) freq_units = found_entry.get("freq_units") freq_value = found_entry.get("freq") @@ -237,7 +239,7 @@ def rename_file(input_file: str, diag_manifest: tuple[str, ...] | str | None = ( duration = f"P{freq_value}M" format_ = "%Y%m" else: - raise Exception(f"Diag manifest found but frequency units '{freq_units}' are unexpected; expected 'years' or 'months'.") + log_and_raise(f"Diag manifest found but frequency units '{freq_units}' are unexpected; expected 'years' or 'months'.", Exception) duration_object = duration_parser.parse(duration) # since only one timestep, frequency equals duration @@ -261,7 +263,7 @@ def rename_file(input_file: str, diag_manifest: tuple[str, ...] 
| str | None = ( freq_label = duration fre_logger.info(f"'{input_file}' has 1 timesteps without diag manifest (legacy case to be removed); date1='{date1}'; date2='{date2}'; duration='{duration}'") else: - raise ValueError(f"Diag manifest required to process input file '{input_file}' with one timestep and no time bounds") + log_and_raise(f"Diag manifest required to process input file '{input_file}' with one timestep and no time bounds") date1_str = date1.strftime(format_) date2_str = date2.strftime(format_) @@ -356,4 +358,4 @@ def rename_split(input_dir: str, output_dir: str, component: str, use_subdirs: b link_or_copy(input_file, output_file) did_something = True if not did_something: - raise FileNotFoundError(f"No '{component}' files were found in '{input_dir}'") + log_and_raise(f"No '{component}' files were found in '{input_dir}'", FileNotFoundError) diff --git a/fre/pp/run_script.py b/fre/pp/run_script.py index e8cc62da3..b45f0a41f 100644 --- a/fre/pp/run_script.py +++ b/fre/pp/run_script.py @@ -4,6 +4,7 @@ import time from . import make_workflow_name +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -31,7 +32,7 @@ def pp_run_subtool(experiment = None, platform = None, target = None, :type no_wait: boolean """ if None in [experiment, platform, target]: - raise ValueError( 'experiment, platform, and target must all not be None.' + log_and_raise( 'experiment, platform, and target must all not be None.' 'currently, their values are...' 
f'{experiment} / {platform} / {target}') @@ -66,6 +67,6 @@ def pp_run_subtool(experiment = None, platform = None, target = None, capture_output = True ).stdout.decode('utf-8') if not len(result): - raise Exception('Cylc scheduler was started without error but is not running after 30 seconds') + log_and_raise('Cylc scheduler was started without error but is not running after 30 seconds', Exception) fre_logger.info(result) diff --git a/fre/pp/split_netcdf_script.py b/fre/pp/split_netcdf_script.py index fed7db58e..3606cad05 100644 --- a/fre/pp/split_netcdf_script.py +++ b/fre/pp/split_netcdf_script.py @@ -90,12 +90,7 @@ def split_netcdf(inputDir, outputDir, component, history_source, use_subdirs, ydict = yaml.safe_load(Path(yamlfile).read_text()) vardict = get_variables(ydict, component) if vardict is None or history_source not in vardict.keys(): - fre_logger.error( - f"error: either component {component} not defined or " - f"source {history_source} not defined under component " - f"{component} in yamlfile {yamlfile}." - ) - raise ValueError( + log_and_raise( f"error: either component {component} not defined or " f"source {history_source} not defined under component " f"{component} in yamlfile {yamlfile}." 
@@ -137,12 +132,12 @@ def split_netcdf(inputDir, outputDir, component, history_source, use_subdirs, files_split += 1 fre_logger.info(f"{files_split} files split") if files_split == 0: - fre_logger.error( + log_and_raise( f"error: no files found in dirs {sd_string} under " f"{workdir} that match pattern {file_regex}; " - "no splitting took place" + "no splitting took place", + OSError ) - raise OSError else: files_split = 0 files=[os.path.join(workdir, el) for el in os.listdir(workdir) if re.match(file_regex, el) is not None] @@ -151,11 +146,11 @@ def split_netcdf(inputDir, outputDir, component, history_source, use_subdirs, split_file_xarray(infile, os.path.abspath(outputDir), varlist) files_split += 1 if len(files) == 0: - fre_logger.error( + log_and_raise( f"error: no files found in {workdir} that match pattern " - f"{file_regex}; no splitting took place" + f"{file_regex}; no splitting took place", + OSError ) - raise OSError fre_logger.info(f"split-netcdf-wrapper call complete, having split {files_split} files") sys.exit(0) #check this diff --git a/fre/pp/status_script.py b/fre/pp/status_script.py index 963fecd4a..35c17054c 100644 --- a/fre/pp/status_script.py +++ b/fre/pp/status_script.py @@ -3,6 +3,7 @@ import subprocess import logging from . import make_workflow_name +from fre import log_and_raise fre_logger = logging.getLogger(__name__) TIMEOUT_SECS=120#30 @@ -21,7 +22,7 @@ def status_subtool(experiment = None, platform = None, target = None): """ if None in [experiment, platform, target]: - raise ValueError( 'experiment, platform, and target must all not be None.' + log_and_raise( 'experiment, platform, and target must all not be None.' 'currently, their values are...' 
f'{experiment} / {platform} / {target}') @@ -33,4 +34,4 @@ def status_subtool(experiment = None, platform = None, target = None): try: subprocess.run(cmd, shell=True, check=True, timeout=TIMEOUT_SECS) except: - raise Exception('FAILED: subprocess call to- cylc workflow-state {name}') + log_and_raise('FAILED: subprocess call to- cylc workflow-state {name}', Exception) diff --git a/fre/pp/trigger_script.py b/fre/pp/trigger_script.py index c4e95f0bc..67fa3b762 100644 --- a/fre/pp/trigger_script.py +++ b/fre/pp/trigger_script.py @@ -4,6 +4,7 @@ import subprocess from . import make_workflow_name +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -31,7 +32,7 @@ def trigger(experiment = None, platform = None, target = None, time = None): the first chunk of a filename (19790101.atmos_tracer.tile6.nc). """ if None in [experiment, platform, target, time]: - raise ValueError( 'experiment, platform, target and time must all not be None.' + log_and_raise( 'experiment, platform, target and time must all not be None.' 'currently, their values are...' f'{experiment} / {platform} / {target} / {time}') diff --git a/fre/pp/validate_script.py b/fre/pp/validate_script.py index 7839f72d8..37f525497 100644 --- a/fre/pp/validate_script.py +++ b/fre/pp/validate_script.py @@ -3,6 +3,7 @@ import os import subprocess from . import make_workflow_name +from fre import log_and_raise def validate_subtool(experiment = None, platform = None, target = None): """ @@ -10,7 +11,7 @@ def validate_subtool(experiment = None, platform = None, target = None): ~/cylc-src/____ """ if None in [experiment, platform, target]: - raise ValueError( 'experiment, platform, and target must all not be None.' + log_and_raise( 'experiment, platform, and target must all not be None.' 'currently, their values are...' 
f'{experiment} / {platform} / {target}') @@ -26,7 +27,7 @@ def validate_subtool(experiment = None, platform = None, target = None): cmd = "rose macro --validate" subprocess.run(cmd, shell=True, check=True) except: - raise Exception('rose macro --validate exited non-zero') + log_and_raise('rose macro --validate exited non-zero', Exception) finally: os.chdir(go_back_here) @@ -38,6 +39,6 @@ def validate_subtool(experiment = None, platform = None, target = None): cmd = "cylc validate ." subprocess.run(cmd, shell=True, check=True) except: - raise Exception('cylc validate . exited non-zero') + log_and_raise('cylc validate . exited non-zero', Exception) finally: os.chdir(go_back_here) diff --git a/fre/run/frerun.py b/fre/run/frerun.py index 81533bd9d..507cd8e92 100644 --- a/fre/run/frerun.py +++ b/fre/run/frerun.py @@ -4,6 +4,7 @@ import click from .frerunexample import run_test_function +from fre import log_and_raise @click.group(help=click.style(" - run subcommands !!!NotImplemented!!!", fg=(164,29,132))) def run_cli(): @@ -14,4 +15,4 @@ def run_cli(): def function(uppercase): """ - Execute fre run test """ run_test_function(uppercase) - raise NotImplementedError('fre run has not been implemented yet!') + log_and_raise('fre run has not been implemented yet!', NotImplementedError) From 74a960304d6b1c6a424f0866d2c5e51c4db56dea Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 3 Apr 2026 05:51:34 +0000 Subject: [PATCH 04/10] Add explicit ValueError to log_and_raise calls Make exception type explicit in all log_and_raise calls where ValueError was being raised (matching the original raise ValueError statements). Also fix a code structure issue in combine.py where the if/elif frequency blocks were incorrectly nested. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> Co-authored-by: ilaflott <6273252+ilaflott@users.noreply.github.com> --- fre/app/generate_time_averages/combine.py | 6 +++-- fre/app/helpers.py | 2 +- .../remap_pp_components.py | 24 ++++++++++--------- fre/make/create_checkout_script.py | 4 ++-- fre/make/create_compile_script.py | 2 +- fre/make/create_docker_script.py | 2 +- fre/make/create_makefile_script.py | 2 +- fre/make/gfdlfremake/platformfre.py | 6 ++--- fre/make/gfdlfremake/yamlfre.py | 2 +- fre/make/make_helpers.py | 3 ++- fre/make/run_fremake_script.py | 2 +- fre/pp/checkout_script.py | 4 ++-- fre/pp/nccheck_script.py | 3 ++- fre/pp/ppval_script.py | 11 +++++---- fre/pp/rename_split_script.py | 8 +++---- 15 files changed, 44 insertions(+), 37 deletions(-) diff --git a/fre/app/generate_time_averages/combine.py b/fre/app/generate_time_averages/combine.py index c96c34812..57dfb2ad9 100644 --- a/fre/app/generate_time_averages/combine.py +++ b/fre/app/generate_time_averages/combine.py @@ -36,7 +36,7 @@ def form_bronx_directory_name(frequency: str, elif frequency == "yr": frequency_label = "annual" else: - log_and_raise(f"Frequency '{frequency}' not recognized or supported") + log_and_raise(f"Frequency '{frequency}' not recognized or supported", ValueError) return frequency_label + '_' + str(interval_object.years) + 'yr' @@ -93,7 +93,9 @@ def combine( root_in_dir: str, :rtype: None """ if frequency not in ["yr", "mon"]: - log_and_raise(f"Frequency '{frequency}' not recognized or supported") + log_and_raise(f"Frequency '{frequency}' not recognized or supported", ValueError) + + if frequency == "yr": frequency_iso = "P1Y" elif frequency == "mon": frequency_iso = "P1M" diff --git a/fre/app/helpers.py b/fre/app/helpers.py index 5f878d685..240118299 100644 --- a/fre/app/helpers.py +++ b/fre/app/helpers.py @@ -63,7 +63,7 @@ def get_variables(yml: dict, pp_comp: str) -> dict: # If the dictionary is empty (no overlap of pp components and components # 
in pp yaml) --> error if not src_vars: - log_and_raise(f"PP component, {pp_comp}, not found in pp yaml configuration!") + log_and_raise(f"PP component, {pp_comp}, not found in pp yaml configuration!", ValueError) return src_vars diff --git a/fre/app/remap_pp_components/remap_pp_components.py b/fre/app/remap_pp_components/remap_pp_components.py index 2a74fc93d..44bc91b74 100755 --- a/fre/app/remap_pp_components/remap_pp_components.py +++ b/fre/app/remap_pp_components/remap_pp_components.py @@ -33,13 +33,13 @@ def verify_dirs(in_dir: str, out_dir: str): if Path(in_dir).is_dir(): fre_logger.info("Input directory is a valid directory") else: - log_and_raise(f"Error: Input directory {in_dir} does not exist or is not a valid directory") + log_and_raise(f"Error: Input directory {in_dir} does not exist or is not a valid directory", ValueError) # Verify output directory exists and is a directory if Path(out_dir).is_dir(): fre_logger.info("Output directory is a valid directory") else: - log_and_raise(f"Error: Output directory {out_dir} does not exist or is not a valid directory") + log_and_raise(f"Error: Output directory {out_dir} does not exist or is not a valid directory", ValueError) def create_dir(out_dir: str, comp: str, freq: str, chunk:str, ens:str, dir_ts: bool) -> str: """ @@ -116,7 +116,7 @@ def freq_to_legacy(iso_dura: str) -> str: elif iso_dura in ['PT30M', 'PT0.5H']: freq_legacy = '30min' else: - log_and_raise(f"Could not convert ISO duration '{iso_dura}'") + log_and_raise(f"Could not convert ISO duration '{iso_dura}'", ValueError) return freq_legacy @@ -162,7 +162,7 @@ def freq_to_date_format(iso_freq: str) -> str: elif (iso_freq[:2]=='PT') and (iso_freq[-1:]=='H'): return 'CCYYMMDDThh' else: - log_and_raise(f'ERROR: Unknown Frequency {iso_freq}') + log_and_raise(f'ERROR: Unknown Frequency {iso_freq}', ValueError) def truncate_date(date: str, freq: str) -> str: """ @@ -232,7 +232,7 @@ def search_files(product: str, var: list, source: str, freq: str, 
fre_logger.info("var: %s", v) f = glob.glob(f"{source}.{v}*.nc") if not f: #if glob returns empty list - log_and_raise("Variable {v} could not be found or does not exist.") + log_and_raise(f"Variable {v} could not be found or does not exist.", ValueError) files.extend(f) else: if product == "ts": @@ -241,7 +241,7 @@ def search_files(product: str, var: list, source: str, freq: str, elif product == "av": date = truncate_date(begin, "P1Y") else: - log_and_raise("Product not set to ts or av.") + log_and_raise("Product not set to ts or av.", ValueError) if var == "all": f = glob.glob(f"{source}.{date}-*.*.nc") @@ -251,7 +251,7 @@ def search_files(product: str, var: list, source: str, freq: str, fre_logger.info("var: %s", v) f = glob.glob(f"{source}.{date}-*.{v}*.nc") if not f: #if glob returns empty list - log_and_raise("Variable {v} could not be found or does not exist.") + log_and_raise(f"Variable {v} could not be found or does not exist.", ValueError) files.extend(f) if product == "av" and current_chunk == "P1Y": f = glob.glob(f"{source}.{date}.*.nc") @@ -279,7 +279,8 @@ def get_varlist(comp_info: dict, product: str, req_source: str, src_vars: dict) if comp_info.get("static") is None: log_and_raise( f"Product is set to static but no static sources/variables defined for " - f"{comp_info.get('type')}" + f"{comp_info.get('type')}", + ValueError ) ## Dictionary of variables associated with pp component source name @@ -543,9 +544,9 @@ def remap_pp_components(input_dir: str, output_dir: str, begin_date: str, curren if not files: if ens_mem is not None: - log_and_raise(f"\nError: No input files found in {input_dir}/{g}/{ens_mem}/{s}/{f}/{c}") + log_and_raise(f"\nError: No input files found in {input_dir}/{g}/{ens_mem}/{s}/{f}/{c}", ValueError) else: - log_and_raise(f"\nError: No input files found in {input_dir}/{g}/{s}/{f}/{c}") + log_and_raise(f"\nError: No input files found in {input_dir}/{g}/{s}/{f}/{c}", ValueError) os.chdir(output_dir) @@ -593,7 +594,8 @@ def 
remap_pp_components(input_dir: str, output_dir: str, begin_date: str, curren if not Path(src_file).exists(): log_and_raise("Offline diagnostic file defined but " f"{src_file} does not exist or cannot " - "be found!") + "be found!", + ValueError) offline_link = ["ln", "-s", diff --git a/fre/make/create_checkout_script.py b/fre/make/create_checkout_script.py index 02ccf6126..615e463a1 100644 --- a/fre/make/create_checkout_script.py +++ b/fre/make/create_checkout_script.py @@ -116,7 +116,7 @@ def checkout_create(yamlfile: str, platform: tuple, target: tuple, jobs_str = str(njobs) if isinstance(njobs, bool) and execute: - log_and_raise('njobs must be defined as a number if --execute flag is True') + log_and_raise('njobs must be defined as a number if --execute flag is True', ValueError) # Determine backgrounding syntax # parallel_cmd is the suffix added to shell commands @@ -152,7 +152,7 @@ def checkout_create(yamlfile: str, platform: tuple, target: tuple, for platform_name in platform: if not model_yaml.platforms.hasPlatform(platform_name): - log_and_raise(f"{platform_name} does not exist in platforms.yaml") + log_and_raise(f"{platform_name} does not exist in platforms.yaml", ValueError) platform_info = model_yaml.platforms.getPlatformFromName(platform_name) diff --git a/fre/make/create_compile_script.py b/fre/make/create_compile_script.py index 4a50964fd..a66a22bd8 100644 --- a/fre/make/create_compile_script.py +++ b/fre/make/create_compile_script.py @@ -102,7 +102,7 @@ def compile_create(yamlfile:str, platform:tuple[str], target:tuple[str], makejob for target_name in tlist: target = targetfre.fretarget(target_name) if not model_yaml.platforms.hasPlatform(platform_name): - log_and_raise(f"{platform_name} does not exist in platforms.yaml") + log_and_raise(f"{platform_name} does not exist in platforms.yaml", ValueError) platform = model_yaml.platforms.getPlatformFromName(platform_name) ## Make the bld_dir based on the modelRoot, the platform, and the target diff --git 
a/fre/make/create_docker_script.py b/fre/make/create_docker_script.py index 70d670ec0..29cd41f74 100644 --- a/fre/make/create_docker_script.py +++ b/fre/make/create_docker_script.py @@ -80,7 +80,7 @@ def dockerfile_create(yamlfile: str, platform: tuple[str], target: tuple[str], for targetName in tlist: targetObject = targetfre.fretarget(targetName) if not modelYaml.platforms.hasPlatform(platformName): - log_and_raise(f"{platformName} does not exist in platforms.yaml") + log_and_raise(f"{platformName} does not exist in platforms.yaml", ValueError) platform = modelYaml.platforms.getPlatformFromName(platformName) diff --git a/fre/make/create_makefile_script.py b/fre/make/create_makefile_script.py index 47bc96349..cbaf0ce18 100644 --- a/fre/make/create_makefile_script.py +++ b/fre/make/create_makefile_script.py @@ -93,7 +93,7 @@ def makefile_create(yamlfile: str, platform: tuple[str], target: tuple[str]): if model_yaml.platforms.hasPlatform(platform_name): pass else: - log_and_raise(f"{platform_name} does not exist in platforms.yaml") + log_and_raise(f"{platform_name} does not exist in platforms.yaml", ValueError) platform=model_yaml.platforms.getPlatformFromName(platform_name) ## Make the bld_dir based on the modelRoot, the platform, and the target diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index 898404cdc..854121931 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -55,7 +55,7 @@ def __init__(self,platforminfo): except: log_and_raise("Platform "+p["name"]+": You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the file "+fname+"\n", Exception) if p["containerBuild"] != "podman" and p["containerBuild"] != "docker": - log_and_raise("Platform "+p["name"]+": Container builds only supported with docker or podman, but you listed "+p["containerBuild"]+"\n") + log_and_raise("Platform "+p["name"]+": Container builds only supported with 
docker or podman, but you listed "+p["containerBuild"]+"\n", ValueError) ## Get the name of the base container try: p["containerBase"] @@ -80,7 +80,7 @@ def __init__(self,platforminfo): except: p["container2base"] = "" else: - log_and_raise("Platform "+p["name"]+": You defined container2base "+p["container2base"]+" but container2step is False\n") + log_and_raise("Platform "+p["name"]+": You defined container2base "+p["container2base"]+" but container2step is False\n", ValueError) ## Get any commands to execute in the dockerfile RUN command try: p["RUNenv"] @@ -109,7 +109,7 @@ def __init__(self,platforminfo): try: p["mkTemplate"] except: - log_and_raise("The platform "+p["name"]+" must specify a mkTemplate \n") + log_and_raise("The platform "+p["name"]+" must specify a mkTemplate \n", ValueError) def hasPlatform(self,name): """ diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index aca681deb..ef72ee40e 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -33,7 +33,7 @@ def __init__(self,compileinfo): self.yaml = compileinfo # Check if self.yaml is None if self.yaml is None: - log_and_raise("The provided compileinfo is None. It must be a valid dictionary.") + log_and_raise("The provided compileinfo is None. It must be a valid dictionary.", ValueError) ## Check for required experiment name try: self.yaml["experiment"] diff --git a/fre/make/make_helpers.py b/fre/make/make_helpers.py index c49d3c7ee..0048a312d 100644 --- a/fre/make/make_helpers.py +++ b/fre/make/make_helpers.py @@ -42,7 +42,8 @@ def get_mktemplate_path(mk_template: str, container_flag: bool, model_root: str # Check in template path exists if not Path(template_path).exists(): log_and_raise("Error w/ mkmf template. 
Created path from given " - f"filename: {template_path} does not exist.") + f"filename: {template_path} does not exist.", + ValueError) else: if "/" not in mk_template: template_path = model_root+"/mkmf/templates/"+mk_template diff --git a/fre/make/run_fremake_script.py b/fre/make/run_fremake_script.py index a11ac852c..6799201d1 100644 --- a/fre/make/run_fremake_script.py +++ b/fre/make/run_fremake_script.py @@ -90,7 +90,7 @@ def fremake_run(yamlfile:str, platform:str, target:str, container_platforms = () for platform_name in plist: if not model_yaml.platforms.hasPlatform(platform_name): - log_and_raise(f"{platform_name} does not exist in platforms.yaml") + log_and_raise(f"{platform_name} does not exist in platforms.yaml", ValueError) platform_info = model_yaml.platforms.getPlatformFromName(platform_name) diff --git a/fre/pp/checkout_script.py b/fre/pp/checkout_script.py index 8262feec0..63049dc7d 100644 --- a/fre/pp/checkout_script.py +++ b/fre/pp/checkout_script.py @@ -52,7 +52,7 @@ def checkout_template(experiment = None, platform = None, target = None, branch if None in [experiment, platform, target]: os.chdir(go_back_here) log_and_raise( 'one of these are None: experiment / platform / target = \n' - f'{experiment} / {platform} / {target}' ) + f'{experiment} / {platform} / {target}', ValueError) #name = f"{experiment}__{platform}__{target}" workflow_name = make_workflow_name(experiment, platform, target) @@ -97,7 +97,7 @@ def checkout_template(experiment = None, platform = None, target = None, branch fre_logger.info( f"ERROR: current branch is '{current_branch}', current tag-describe is '{current_tag}'") os.chdir(go_back_here) - log_and_raise('neither tag nor branch matches the git clone branch arg') + log_and_raise('neither tag nor branch matches the git clone branch arg', ValueError) # make sure we are back where we should be if os.getcwd() != go_back_here: diff --git a/fre/pp/nccheck_script.py b/fre/pp/nccheck_script.py index 985e77754..ec9df1583 100644 --- 
a/fre/pp/nccheck_script.py +++ b/fre/pp/nccheck_script.py @@ -42,5 +42,6 @@ def check(file_path: str, num_steps: int): log_and_raise( f" Unexpected number of timesteps found in {file_path}. " f"Found: {num_actual_steps} timesteps " - f"Expected: {num_steps} timesteps" + f"Expected: {num_steps} timesteps", + ValueError ) diff --git a/fre/pp/ppval_script.py b/fre/pp/ppval_script.py index 34750e26f..8a542ba19 100644 --- a/fre/pp/ppval_script.py +++ b/fre/pp/ppval_script.py @@ -122,7 +122,7 @@ def getenot(date_start: str, enot = (diff.days + 1) * 48 else: - log_and_raise(f"Unknown chunk_type '{chunk_type}'") + log_and_raise(f"Unknown chunk_type '{chunk_type}'", ValueError) fre_logger.debug( f"date start: {date_start}; date end: {date_end}; " @@ -185,7 +185,7 @@ def validate(filepath: str): try: cftime.datetime(1,1,1, calendar = cal) except: - log_and_raise(f" Calendar name must follow cf convention for validation. {cal} is not a valid calendar.") + log_and_raise(f" Calendar name must follow cf convention for validation. {cal} is not a valid calendar.", ValueError) # date_{end,start} will have a total of date_end.lastindex groups, # that are the year (date_end[1]), the month (date_end[2]), the @@ -238,18 +238,19 @@ def validate(filepath: str): f" Cannot determine frequency from {filepath}. 
Sub-daily" " files must at minimum be placed in a directory" " corresponding to data frequency: '6hr, 'PT6H', '3hr," - " 'PT3H', '1hr, 'PT1H', '30min, 'PT30M, 'PT0.5H'" + " 'PT3H', '1hr, 'PT1H', '30min, 'PT30M, 'PT0.5H'", + ValueError ) elif date_length == 12: enot = getenot(date_start, date_end, '30minute', cal) else: - log_and_raise(f"Cannot determine frequency for date '{date_start}'") + log_and_raise(f"Cannot determine frequency for date '{date_start}'", ValueError) try: ncc.check(filepath, enot) except: - log_and_raise(f"Timesteps found in {filepath} differ from expectation") + log_and_raise(f"Timesteps found in {filepath} differ from expectation", ValueError) return 0 diff --git a/fre/pp/rename_split_script.py b/fre/pp/rename_split_script.py index cc92bf78b..dcd177a13 100644 --- a/fre/pp/rename_split_script.py +++ b/fre/pp/rename_split_script.py @@ -53,7 +53,7 @@ def get_freq_and_format_from_two_dates(date1: cftime.datetime, date2: cftime.dat iso_freq = f"PT{int(minutes)}M" format_ = '%Y%m%d%H%M' else: - log_and_raise(f"Cannot determine frequency and format from '{date1}' and '{date2}'") + log_and_raise(f"Cannot determine frequency and format from '{date1}' and '{date2}'", ValueError) fre_logger.debug(f"Comparing '{date1}' and '{date2}': returning frequency '{iso_freq}' and format '{format_}'") return iso_freq, format_ @@ -84,7 +84,7 @@ def get_duration_from_two_dates(date1: cftime.datetime, date2: cftime.datetime) if years_frac < 0.04: duration = f"P{years_round}Y" else: - log_and_raise(f"Could not determine ISO8601 duration between '{date1}' and '{date2}'") + log_and_raise(f"Could not determine ISO8601 duration between '{date1}' and '{date2}'", ValueError) fre_logger.debug(f"Comparing '{date1}' and '{date2}': returning duration '{duration}'") return duration @@ -123,7 +123,7 @@ def rename_file(input_file: str, diag_manifest: tuple[str, ...] 
| str | None = ( var = parts[2] tile = None else: - log_and_raise(f"File '{input_file}' cannot be parsed") + log_and_raise(f"File '{input_file}' cannot be parsed", ValueError) # open the nc file ds = xr.open_dataset(input_file) @@ -263,7 +263,7 @@ def rename_file(input_file: str, diag_manifest: tuple[str, ...] | str | None = ( freq_label = duration fre_logger.info(f"'{input_file}' has 1 timesteps without diag manifest (legacy case to be removed); date1='{date1}'; date2='{date2}'; duration='{duration}'") else: - log_and_raise(f"Diag manifest required to process input file '{input_file}' with one timestep and no time bounds") + log_and_raise(f"Diag manifest required to process input file '{input_file}' with one timestep and no time bounds", ValueError) date1_str = date1.strftime(format_) date2_str = date2.strftime(format_) From 72ec23c921b6d9927a87aca87cb3aa1118244dcb Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 3 Apr 2026 05:57:13 +0000 Subject: [PATCH 05/10] Fix code review issues: restore interval_object assignment, add explicit ValueError to log_and_raise calls - Restore accidentally removed interval_object = duration_parser.parse(interval) in combine.py form_bronx_directory_name() - Add explicit ValueError type to all log_and_raise() calls that relied on the default parameter: frenctoolsTimeAverager.py, generate_time_averages.py, wrapper.py, mask_atmos_plevel.py, platformfre.py, targetfre.py, cmor_yamler.py Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> Co-authored-by: ilaflott <6273252+ilaflott@users.noreply.github.com> --- fre/app/generate_time_averages/combine.py | 1 + .../generate_time_averages/frenctoolsTimeAverager.py | 10 +++++----- .../generate_time_averages/generate_time_averages.py | 4 ++-- fre/app/generate_time_averages/wrapper.py | 2 +- fre/app/mask_atmos_plevel/mask_atmos_plevel.py | 2 +- fre/cmor/cmor_yamler.py | 6 +++--- fre/make/gfdlfremake/platformfre.py | 2 
+- fre/make/gfdlfremake/targetfre.py | 6 +++--- 8 files changed, 17 insertions(+), 16 deletions(-) diff --git a/fre/app/generate_time_averages/combine.py b/fre/app/generate_time_averages/combine.py index 57dfb2ad9..2bbfbfa99 100644 --- a/fre/app/generate_time_averages/combine.py +++ b/fre/app/generate_time_averages/combine.py @@ -37,6 +37,7 @@ def form_bronx_directory_name(frequency: str, frequency_label = "annual" else: log_and_raise(f"Frequency '{frequency}' not recognized or supported", ValueError) + interval_object = duration_parser.parse(interval) return frequency_label + '_' + str(interval_object.years) + 'yr' diff --git a/fre/app/generate_time_averages/frenctoolsTimeAverager.py b/fre/app/generate_time_averages/frenctoolsTimeAverager.py index 00b05cc36..9bbf4e16b 100644 --- a/fre/app/generate_time_averages/frenctoolsTimeAverager.py +++ b/fre/app/generate_time_averages/frenctoolsTimeAverager.py @@ -36,7 +36,7 @@ def generate_timavg(self, infile=None, outfile=None): """ exitstatus=1 if self.avg_type not in ['month','all']: - log_and_raise(f'avg_type= {self.avg_type} not supported by this class at this time.') + log_and_raise(f'avg_type= {self.avg_type} not supported by this class at this time.', ValueError) if self.unwgt: fre_logger.warning('unwgt=True unsupported by frenctoolsAverager. Ignoring!!!') @@ -46,7 +46,7 @@ def generate_timavg(self, infile=None, outfile=None): ' not currently supported for frenctols time averaging. ignoring!', self.var) if infile is None: - log_and_raise('Need an input file, specify a value for the infile argument') + log_and_raise('Need an input file, specify a value for the infile argument', ValueError) if outfile is None: outfile='frenctoolsTimeAverage_'+infile @@ -54,7 +54,7 @@ def generate_timavg(self, infile=None, outfile=None): #check for existence of timavg.csh. If not found, issue might be that user is not in env with frenctools. 
if shutil.which('timavg.csh') is None: - log_and_raise('did not find timavg.csh') + log_and_raise('did not find timavg.csh', ValueError) fre_logger.info('timeaverager using: %s', shutil.which("timavg.csh")) @@ -107,7 +107,7 @@ def generate_timavg(self, infile=None, outfile=None): if subp.returncode != 0: fre_logger.error('stderror = %s', stderror) - log_and_raise(f'error: timavg.csh had a problem, subp.returncode = {subp.returncode}') + log_and_raise(f'error: timavg.csh had a problem, subp.returncode = {subp.returncode}', ValueError) fre_logger.info('%s climatology successfully ran',nc_monthly_file) exitstatus=0 @@ -131,7 +131,7 @@ def generate_timavg(self, infile=None, outfile=None): if subp.returncode != 0: fre_logger.error('stderror = %s', stderror) - log_and_raise(f'error: timavgcsh command not properly executed, subp.returncode = {subp.returncode}') + log_and_raise(f'error: timavgcsh command not properly executed, subp.returncode = {subp.returncode}', ValueError) fre_logger.info('climatology successfully ran') exitstatus = 0 diff --git a/fre/app/generate_time_averages/generate_time_averages.py b/fre/app/generate_time_averages/generate_time_averages.py index 196fb0a47..bafaf1cf9 100755 --- a/fre/app/generate_time_averages/generate_time_averages.py +++ b/fre/app/generate_time_averages/generate_time_averages.py @@ -42,9 +42,9 @@ def generate_time_average(infile: Union[str, List[str]] = None, start_time = time.perf_counter() fre_logger.debug('called generate_time_average') if None in [infile, outfile, pkg]: - log_and_raise('infile, outfile, and pkg are required inputs') + log_and_raise('infile, outfile, and pkg are required inputs', ValueError) if pkg not in ['cdo', 'fre-nctools', 'fre-python-tools']: - log_and_raise(f'argument pkg = {pkg} not known, must be one of: cdo, fre-nctools, fre-python-tools') + log_and_raise(f'argument pkg = {pkg} not known, must be one of: cdo, fre-nctools, fre-python-tools', ValueError) exitstatus = 1 myavger = None diff --git 
a/fre/app/generate_time_averages/wrapper.py b/fre/app/generate_time_averages/wrapper.py index 2609be7e5..13fa3e551 100644 --- a/fre/app/generate_time_averages/wrapper.py +++ b/fre/app/generate_time_averages/wrapper.py @@ -78,7 +78,7 @@ def generate_wrapper(cycle_point: str, input_interval = DurationParser().parse(input_interval) if frequency not in ["yr", "mon"]: - log_and_raise(f"Frequency '{frequency}' not recognized or supported") + log_and_raise(f"Frequency '{frequency}' not recognized or supported", ValueError) # convert frequency 'yr' or 'mon' to ISO8601 if frequency == 'mon': diff --git a/fre/app/mask_atmos_plevel/mask_atmos_plevel.py b/fre/app/mask_atmos_plevel/mask_atmos_plevel.py index 9f47a0f91..df00d9f92 100755 --- a/fre/app/mask_atmos_plevel/mask_atmos_plevel.py +++ b/fre/app/mask_atmos_plevel/mask_atmos_plevel.py @@ -64,7 +64,7 @@ def mask_atmos_plevel_subtool(infile: str = None, if "ps" not in list(ds_ps.variables): fre_logger.warning('pressure variable ps not found in target pressure file') if not warn_no_ps: - log_and_raise(f"Surface pressure file {psfile} does not contain surface pressure.") + log_and_raise(f"Surface pressure file {psfile} does not contain surface pressure.", ValueError) fre_logger.warning('warn_no_ps is True! this means I\'m going to no-op gracefully instead of raising an error') return diff --git a/fre/cmor/cmor_yamler.py b/fre/cmor/cmor_yamler.py index a4549dece..414fb284e 100644 --- a/fre/cmor/cmor_yamler.py +++ b/fre/cmor/cmor_yamler.py @@ -183,7 +183,7 @@ def cmor_yaml_subtool( yamlfile: str = None, log_and_raise( f'freq is required for CMIP7 but was not specified for table target {table_name}.\n' f' CMIP7 MIP tables do not contain frequency metadata.\n' - f' please set freq explicitly (e.g. "monthly", "daily") in the cmor yaml.') + f' please set freq explicitly (e.g. 
"monthly", "daily") in the cmor yaml.', ValueError) # CMIP6 tables carry frequency — attempt to derive it fre_logger.info('freq not specified in cmor yaml for table %s, ' 'attempting to derive from CMIP6 MIP table', table_name) @@ -195,7 +195,7 @@ def cmor_yaml_subtool( yamlfile: str = None, log_and_raise( f'not enough frequency information to process variables for {table_name}.\n' f' freq was not specified in the cmor yaml, and could not be derived from the MIP table.\n' - f' please set freq explicitly (e.g. "monthly", "daily") in the cmor yaml.') + f' please set freq explicitly (e.g. "monthly", "daily") in the cmor yaml.', ValueError) fre_logger.info('derived freq = %s from MIP table %s', freq, json_mip_table_config) # update the table target dict so downstream code sees the resolved freq @@ -211,7 +211,7 @@ def cmor_yaml_subtool( yamlfile: str = None, grid_desc = gridding_dict['grid_desc'] nom_res = gridding_dict['nom_res'] if None in [grid_label, grid_desc, nom_res]: - log_and_raise('gridding dictionary, if present, must have all three fields be non-empty.') + log_and_raise('gridding dictionary, if present, must have all three fields be non-empty.', ValueError) # gridding info of data ---- revisit table_components_list = cmor_yaml_table_target['target_components'] diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index 854121931..29bc2a966 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -97,7 +97,7 @@ def __init__(self,platforminfo): except: log_and_raise("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the file "+fname+"\n", Exception) if p["containerRun"] != "apptainer" and p["containerRun"] != "singularity": - log_and_raise("Container builds only supported with apptainer, but you listed "+p["containerRun"]+"\n") + log_and_raise("Container builds only supported with apptainer, but you listed "+p["containerRun"]+"\n", 
ValueError) ## Get the path to where the output model container will be located try: diff --git a/fre/make/gfdlfremake/targetfre.py b/fre/make/gfdlfremake/targetfre.py index 4f5363228..37519e9ec 100644 --- a/fre/make/gfdlfremake/targetfre.py +++ b/fre/make/gfdlfremake/targetfre.py @@ -54,19 +54,19 @@ def __init__(self,t): if self.debug: try: if self.repro or self.prod == True: - log_and_raise(errormsg) + log_and_raise(errormsg, ValueError) except ValueError: raise elif self.repro: try: if self.prod == True: - log_and_raise(errormsg) + log_and_raise(errormsg, ValueError) except ValueError: raise else: try: if self.prod == False: - log_and_raise("Your target '"+self.target+"' needs to include one of the following: prod, repro, debug") + log_and_raise("Your target '"+self.target+"' needs to include one of the following: prod, repro, debug", ValueError) except ValueError: raise From e184940bcaa4c12f1c034b2eef7701f99df5ab7f Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 3 Apr 2026 06:11:17 +0000 Subject: [PATCH 06/10] Convert raise statements to log_and_raise across 11 files Replace all raise ExcType(msg) and raise ExcType(msg) from exc patterns with log_and_raise(msg, ExcType) and log_and_raise(msg, ExcType, exc=exc) respectively, across: - fre/pp/configure_script_yaml.py (already had import) - fre/cmor/cmor_helpers.py (already had import) - fre/cmor/cmor_mixer.py (already had import) - fre/cmor/cmor_finder.py (added import) - fre/cmor/cmor_config.py (added import) - fre/yamltools/helpers.py (added import) - fre/yamltools/info_parsers/cmor_info_parser.py (added import) - fre/yamltools/info_parsers/compile_info_parser.py (added import) - fre/yamltools/info_parsers/analysis_info_parser.py (added import) - fre/yamltools/info_parsers/pp_info_parser.py (added import) - fre/yamltools/combine_yamls_script.py (added import) Also converts printf-style multi-arg ValueError/FileNotFoundError calls to f-string 
log_and_raise equivalents. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> Co-authored-by: ilaflott <6273252+ilaflott@users.noreply.github.com> --- fre/cmor/cmor_config.py | 11 ++-- fre/cmor/cmor_finder.py | 13 ++-- fre/cmor/cmor_helpers.py | 23 ++++--- fre/cmor/cmor_mixer.py | 61 ++++++++++--------- fre/pp/configure_script_yaml.py | 12 ++-- fre/yamltools/combine_yamls_script.py | 11 ++-- fre/yamltools/helpers.py | 11 ++-- .../info_parsers/analysis_info_parser.py | 11 ++-- .../info_parsers/cmor_info_parser.py | 17 +++--- .../info_parsers/compile_info_parser.py | 11 ++-- fre/yamltools/info_parsers/pp_info_parser.py | 13 ++-- 11 files changed, 103 insertions(+), 91 deletions(-) diff --git a/fre/cmor/cmor_config.py b/fre/cmor/cmor_config.py index e49fb0bdd..a2f638897 100644 --- a/fre/cmor/cmor_config.py +++ b/fre/cmor/cmor_config.py @@ -18,6 +18,7 @@ import logging from pathlib import Path +from fre import log_and_raise from .cmor_finder import make_simple_varlist from .cmor_constants import EXCLUDED_TABLE_SUFFIXES @@ -103,11 +104,11 @@ def cmor_config_subtool( """ # ---- validate inputs ---- if not Path(pp_dir).is_dir(): - raise FileNotFoundError(f'pp_dir does not exist: {pp_dir}') + log_and_raise(f'pp_dir does not exist: {pp_dir}', FileNotFoundError) if not Path(mip_tables_dir).is_dir(): - raise FileNotFoundError(f'mip_tables_dir does not exist: {mip_tables_dir}') + log_and_raise(f'mip_tables_dir does not exist: {mip_tables_dir}', FileNotFoundError) if not Path(exp_config).is_file(): - raise FileNotFoundError(f'exp_config does not exist: {exp_config}') + log_and_raise(f'exp_config does not exist: {exp_config}', FileNotFoundError) # ensure output directories exist Path(varlist_dir).mkdir(parents=True, exist_ok=True) @@ -116,8 +117,8 @@ def cmor_config_subtool( # ---- gather MIP tables ---- mip_tables = _filter_mip_tables(mip_tables_dir, mip_era) if not mip_tables: - raise ValueError( - f'no MIP tables found in {mip_tables_dir} for era {mip_era} 
after filtering') + log_and_raise( + f'no MIP tables found in {mip_tables_dir} for era {mip_era} after filtering', ValueError) # ---- discover pp components ---- ppcompdirs = sorted(glob.glob(f'{pp_dir}/*')) diff --git a/fre/cmor/cmor_finder.py b/fre/cmor/cmor_finder.py index 55e6ee45a..023928c2d 100644 --- a/fre/cmor/cmor_finder.py +++ b/fre/cmor/cmor_finder.py @@ -26,6 +26,7 @@ from typing import Optional, Dict, IO from .cmor_helpers import get_json_file_data +from fre import log_and_raise from .cmor_constants import DO_NOT_PRINT_LIST fre_logger = logging.getLogger(__name__) @@ -97,12 +98,12 @@ def cmor_find_subtool( json_var_list: Optional[str] = None, CMIP6 tables. Information is printed via the logger. """ if not Path(json_table_config_dir).exists(): - raise OSError(f'ERROR directory {json_table_config_dir} does not exist! exit.') + log_and_raise(f'ERROR directory {json_table_config_dir} does not exist! exit.', OSError) fre_logger.info('attempting to find and open files in dir: \n %s ', json_table_config_dir) json_table_configs = glob.glob(f'{json_table_config_dir}/*.json') if not json_table_configs: - raise OSError(f'ERROR directory {json_table_config_dir} contains no JSON files, exit.') + log_and_raise(f'ERROR directory {json_table_config_dir} contains no JSON files, exit.', OSError) fre_logger.info('found content in json_table_config_dir') var_list = None @@ -111,7 +112,7 @@ def cmor_find_subtool( json_var_list: Optional[str] = None, var_list = json.load(var_list_file) if opt_var_name is None and var_list is None: - raise ValueError('ERROR: no opt_var_name given but also no content in variable list!!! exit!') + log_and_raise('ERROR: no opt_var_name given but also no content in variable list!!! 
exit!', ValueError) if opt_var_name is not None: fre_logger.info('opt_var_name is not None: looking for only ONE variables worth of info!') @@ -172,8 +173,8 @@ def make_simple_varlist( dir_targ: str, full_mip_vars_list=get_json_file_data(json_mip_table)["variable_entry"].keys() except Exception as exc: - raise Exception( 'problem opening mip table and getting variable entry data.' - f'exc = {exc}') from exc + log_and_raise('problem opening mip table and getting variable entry data.' + f'exc = {exc}', Exception, exc=exc) fre_logger.debug('attempting to make mip variable list') mip_vars=[ key.split('_')[0] for key in full_mip_vars_list ] @@ -201,5 +202,5 @@ def make_simple_varlist( dir_targ: str, with open(output_variable_list, 'w', encoding='utf-8') as f: json.dump(var_list, f, indent=4) except Exception as exc: - raise OSError('output variable list created but cannot be written') from exc + log_and_raise('output variable list created but cannot be written', OSError, exc=exc) return var_list diff --git a/fre/cmor/cmor_helpers.py b/fre/cmor/cmor_helpers.py index f60425eb3..3276562af 100644 --- a/fre/cmor/cmor_helpers.py +++ b/fre/cmor/cmor_helpers.py @@ -247,7 +247,7 @@ def create_lev_bnds( bound_these: Variable = None, .. note:: Logs debug information about the input and output arrays. 
""" if len(with_these) != (len(bound_these) + 1): - raise ValueError('failed creating bnds on-the-fly :-(') + log_and_raise('failed creating bnds on-the-fly :-(', ValueError) fre_logger.debug('bound_these = \n%s', bound_these) fre_logger.debug('with_these = \n%s', with_these) @@ -294,8 +294,7 @@ def get_iso_datetime_ranges( var_filenames: List[str], fre_logger.debug(' stop_yr_int = %s', stop_yr_int) if iso_daterange_arr is None: - raise ValueError( - 'this function requires the list one desires to fill with datetime ranges from filenames') + log_and_raise('this function requires the list one desires to fill with datetime ranges from filenames', ValueError) for filename in var_filenames: fre_logger.debug('filename = %s', filename) @@ -316,7 +315,7 @@ def get_iso_datetime_ranges( var_filenames: List[str], iso_daterange_arr.sort() if len(iso_daterange_arr) < 1: - raise ValueError('iso_daterange_arr has length 0! i need to find at least one datetime range!') + log_and_raise('iso_daterange_arr has length 0! i need to find at least one datetime range!', ValueError) def check_dataset_for_ocean_grid( ds: Dataset) -> bool: @@ -406,7 +405,7 @@ def create_tmp_dir( outdir: str, fre_logger.info('attempting to create %s dir in tmp_dir targ did not work', outdir_from_exp_config) fre_logger.info('... attempt to avoid a toothless cmor warning failed... moving on') except Exception as exc: - raise OSError('problem creating tmp output directory {}. stop.'.format(tmp_dir)) from exc + log_and_raise('problem creating tmp output directory {}. 
stop.'.format(tmp_dir), OSError, exc=exc) return tmp_dir @@ -425,10 +424,10 @@ def get_json_file_data( json_file_path: Optional[str] = None) -> dict: with open(json_file_path, "r", encoding="utf-8") as json_config_file: return json.load(json_config_file) except Exception as exc: - raise FileNotFoundError( + log_and_raise( 'ERROR: json_file_path file cannot be opened.\n' - ' json_file_path = {}'.format(json_file_path) - ) from exc + ' json_file_path = {}'.format(json_file_path), + FileNotFoundError, exc=exc) def update_grid_and_label( json_file_path: str, @@ -573,7 +572,7 @@ def check_path_existence(some_path: str): :raises FileNotFoundError: If the path does not exist. """ if not Path(some_path).exists(): - raise FileNotFoundError(f'does not exist: {some_path}') + log_and_raise(f'does not exist: {some_path}', FileNotFoundError) def iso_to_bronx_chunk(cmor_chunk_in: str) -> str: """ @@ -589,7 +588,7 @@ def iso_to_bronx_chunk(cmor_chunk_in: str) -> str: if cmor_chunk_in[0] == 'P' and cmor_chunk_in[-1] == 'Y': bronx_chunk = f'{cmor_chunk_in[1:-1]}yr' else: - raise ValueError('problem with converting to bronx chunk from the cmor chunk. check cmor_yamler.py') + log_and_raise('problem with converting to bronx chunk from the cmor chunk. 
check cmor_yamler.py', ValueError) fre_logger.debug('bronx_chunk = %s', bronx_chunk) return bronx_chunk @@ -625,7 +624,7 @@ def conv_mip_to_bronx_freq(cmor_table_freq: str) -> Optional[str]: if bronx_freq is None: fre_logger.warning('MIP table frequency = %s does not have a FRE-bronx equivalent', cmor_table_freq) if cmor_table_freq not in cmor_to_bronx_dict.keys(): - raise KeyError(f'MIP table frequency = "{cmor_table_freq}" is not a valid MIP frequency') + log_and_raise(f'MIP table frequency = "{cmor_table_freq}" is not a valid MIP frequency', KeyError) return bronx_freq def get_bronx_freq_from_mip_table(json_table_config: str) -> str: @@ -646,7 +645,7 @@ def get_bronx_freq_from_mip_table(json_table_config: str) -> str: table_freq = table_config_data['variable_entry'][var_entry]['frequency'] break except Exception as exc: - raise KeyError('no frequency in table under variable_entry. this may be a CMIP7 table.') from exc + log_and_raise('no frequency in table under variable_entry. this may be a CMIP7 table.', KeyError, exc=exc) bronx_freq = conv_mip_to_bronx_freq(table_freq) return bronx_freq diff --git a/fre/cmor/cmor_mixer.py b/fre/cmor/cmor_mixer.py index ae5d0611f..7dbf78adb 100755 --- a/fre/cmor/cmor_mixer.py +++ b/fre/cmor/cmor_mixer.py @@ -208,8 +208,8 @@ def rewrite_netcdf_file_var( mip_var_cfgs: dict = None, with open(json_exp_config, "r", encoding="utf-8") as file: exp_cfg_calendar = json.load(file)['calendar'] if exp_cfg_calendar != time_coords_calendar: - raise ValueError(f"data calendar type {time_coords_calendar} " - f"does not match input config calendar type: {exp_cfg_calendar}") + log_and_raise(f"data calendar type {time_coords_calendar} " + f"does not match input config calendar type: {exp_cfg_calendar}", ValueError) # read in time_bnds, if present fre_logger.info('attempting to read coordinate BNDS, time_bnds') @@ -224,7 +224,7 @@ def rewrite_netcdf_file_var( mip_var_cfgs: dict = None, lev_bnds = None if vert_dim != 0: if vert_dim.lower() not in 
ACCEPTED_VERT_DIMS: - raise ValueError(f'var_dim={var_dim}, vert_dim = {vert_dim} is not supported') #uncovered + log_and_raise(f'var_dim={var_dim}, vert_dim = {vert_dim} is not supported', ValueError) #uncovered lev = ds[vert_dim] if vert_dim.lower() != "landuse": lev_units = ds[vert_dim].units @@ -255,7 +255,7 @@ def rewrite_netcdf_file_var( mip_var_cfgs: dict = None, 'an ocean statics file is needed, but it could not be found.\n' ' moving on and doing my best, but I am probably going to break' ) - raise FileNotFoundError('statics file not found.') from exc + log_and_raise('statics file not found.', FileNotFoundError, exc=exc) fre_logger.info("statics file found.") @@ -355,14 +355,14 @@ def rewrite_netcdf_file_var( mip_var_cfgs: dict = None, if any( [yh_dim != (yq_dim - 1), xh_dim != (xq_dim - 1)]): - raise ValueError( #uncovered + log_and_raise( #uncovered 'the number of h-point lat/lon coordinates is inconsistent with the number of\n' 'q-point lat/lon coordinates! i.e. ( hpoint_dim != qpoint_dim-1 )\n' f'yh_dim = {yh_dim}\n' f'xh_dim = {xh_dim}\n' f'yq_dim = {yq_dim}\n' - f'xq_dim = {xq_dim}' - ) + f'xq_dim = {xq_dim}', + ValueError) # create h-point bounds from the q-point lat lons fre_logger.info('creating yh_bnds, xh_bnds from yq, xq') @@ -763,14 +763,14 @@ def cmorize_target_var_files(indir: str = None, make_cmor_write_here = tmp_dir # make sure we know where we are writing, or else! 
if not Path(make_cmor_write_here).exists(): - raise ValueError(f'\ntmp_dir = \n{tmp_dir}\ncannot be found/created/resolved!') #uncovered + log_and_raise(f'\ntmp_dir = \n{tmp_dir}\ncannot be found/created/resolved!', ValueError) #uncovered gotta_go_back_here = os.getcwd() try: fre_logger.warning("changing directory to: \n%s", make_cmor_write_here) os.chdir(make_cmor_write_here) except Exception as exc: #uncovered - raise OSError(f'(cmorize_target_var_files) could not chdir to {make_cmor_write_here}') from exc + log_and_raise(f'(cmorize_target_var_files) could not chdir to {make_cmor_write_here}', OSError, exc=exc) fre_logger.info("calling rewrite_netcdf_file_var") try: @@ -782,10 +782,11 @@ def cmorize_target_var_files(indir: str = None, json_table_config, prev_path=nc_fls[i] ) except Exception as exc: #uncovered - raise Exception( + log_and_raise( 'problem with rewrite_netcdf_file_var. ' f'exc={exc}\n' - 'exiting and executing finally block.') from exc + 'exiting and executing finally block.', + Exception, exc=exc) finally: # should always execute, errors or not! fre_logger.warning('finally, changing directory to: \n%s', gotta_go_back_here) os.chdir(gotta_go_back_here) @@ -795,8 +796,8 @@ def cmorize_target_var_files(indir: str = None, # now that CMOR has rewritten things... we can take our post-rewriting actions # first, remove /CMOR_tmp/ from the output path. if not Path(local_file_name).is_absolute(): - raise ValueError(f'local_file_name should be an absolute path, not a relative one. \n ' - f'local_file_name = {local_file_name}') + log_and_raise(f'local_file_name should be an absolute path, not a relative one. 
\n ' + f'local_file_name = {local_file_name}', ValueError) fre_logger.info('local_file_name = %s', local_file_name) filename = local_file_name.replace('/CMOR_tmp/','/') @@ -958,26 +959,28 @@ def cmor_run_subtool(indir: str = None, """ # CHECK req'd inputs if None in [indir, json_var_list, json_table_config, json_exp_config, outdir]: - raise ValueError('the following input arguments are required:\n' - '[indir, json_var_list, json_table_config, json_exp_config, outdir] = \n' - '[%s, %s, %s, %s, %s]', indir, json_var_list, json_table_config, json_exp_config, outdir) + log_and_raise(f'the following input arguments are required:\n' + f'[indir, json_var_list, json_table_config, json_exp_config, outdir] = \n' + f'[{indir}, {json_var_list}, {json_table_config}, {json_exp_config}, {outdir}]', + ValueError) # CHECK existence of the exp-specific metadata file if Path(json_exp_config).exists(): json_exp_config = str(Path(json_exp_config).resolve()) else: - raise FileNotFoundError('ERROR: json_exp_config file cannot be opened.\n' - 'json_exp_config = %s', json_exp_config) + log_and_raise(f'ERROR: json_exp_config file cannot be opened.\n' + f'json_exp_config = {json_exp_config}', + FileNotFoundError) # CHECK mip_era entry of exp config exists, needed ? 
try: exp_cfg_mip_era = get_json_file_data(json_exp_config)['mip_era'].upper() except KeyError as exc: - raise KeyError('no mip_era entry in experimental metadata configuration, the file is noncompliant!') from exc + log_and_raise('no mip_era entry in experimental metadata configuration, the file is noncompliant!', KeyError, exc=exc) fre_logger.debug('exp_cfg_mip_era = %s', exp_cfg_mip_era) if exp_cfg_mip_era not in ['CMIP6', 'CMIP7']: - raise ValueError('cmor_mixer only supports CMIP6 and CMIP7 cases') + log_and_raise('cmor_mixer only supports CMIP6 and CMIP7 cases', ValueError) if exp_cfg_mip_era == 'CMIP7': fre_logger.warning('CMIP7 configuration detected, will be expecting and enforcing variable brands.') @@ -1013,7 +1016,7 @@ def cmor_run_subtool(indir: str = None, mip_var_list = [ var.split('_')[0] for var in mip_fullvar_list ] mip_var_brand_list = [ var.split('_')[1] for var in mip_fullvar_list ] if len(mip_var_list) != len(mip_var_brand_list): - raise ValueError('the number of brands is not one-to-one with the number of variables. check config.') + log_and_raise('the number of brands is not one-to-one with the number of variables. check config.', ValueError) elif exp_cfg_mip_era == "CMIP6": mip_var_list = mip_fullvar_list @@ -1049,13 +1052,15 @@ def cmor_run_subtool(indir: str = None, # CHECK that there's at least one variable to run after comparing use inputs vars to MIP config input vars if len(vars_to_run) < 1: - raise ValueError('runnable variable list is of length 0 ' - 'this means no variables in input variable list are in ' - 'the mip table configuration, so there\'s nothing to process!') + log_and_raise('runnable variable list is of length 0 ' + 'this means no variables in input variable list are in ' + 'the mip table configuration, so there\'s nothing to process!', + ValueError) if all([opt_var_name is not None, opt_var_name not in list(vars_to_run.keys())]): - raise ValueError('opt_var_name is not None! (== %s)' - '... 
but the variable is not contained in the target mip table' - '... there\'s nothing to process, exit', opt_var_name) + log_and_raise(f'opt_var_name is not None! (== {opt_var_name})' + '... but the variable is not contained in the target mip table' + '... there\'s nothing to process, exit', + ValueError) fre_logger.info('runnable variable list formed, it is vars_to_run=\n%s', vars_to_run) @@ -1065,7 +1070,7 @@ def cmor_run_subtool(indir: str = None, indir_filenames = glob.glob(f'{indir}/*.nc') indir_filenames.sort() if len(indir_filenames) == 0: - raise ValueError('no files in input target directory = indir = \n%s', indir) + log_and_raise(f'no files in input target directory = indir = \n{indir}', ValueError) fre_logger.debug('found %s filenames', len(indir_filenames)) # name_of_set == component label diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index 86538104d..37d501bcd 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -50,11 +50,11 @@ def validate_yaml(yamlfile: dict) -> None: validate(instance = yamlfile,schema=schema) fre_logger.info("Combined yaml valid") except SchemaError as exc: - raise ValueError(f"Schema '{schema_path}' is not valid. Contact the FRE team.") from exc + log_and_raise(f"Schema '{schema_path}' is not valid. Contact the FRE team.", ValueError, exc=exc) except ValidationError as exc: - raise ValueError("Combined yaml is not valid. Please fix the errors and try again.") from exc + log_and_raise("Combined yaml is not valid. Please fix the errors and try again.", ValueError, exc=exc) except Exception as exc: - raise ValueError("Unclear error from validation. Please try to find the error and try again.") from exc + log_and_raise("Unclear error from validation. 
Please try to find the error and try again.", ValueError, exc=exc) #################### def rose_init(experiment: str, platform: str, target: str) -> metomi.rose.config.ConfigNode: @@ -219,9 +219,9 @@ def yaml_info(yamlfile: str = None, experiment: str = None, platform: str = None fre_logger.info('Starting') if None in [yamlfile, experiment, platform, target]: - raise ValueError( 'yamlfile, experiment, platform, and target must all not be None.' - 'currently, their values are...' - f'{yamlfile} / {experiment} / {platform} / {target}') + log_and_raise('yamlfile, experiment, platform, and target must all not be None.' + 'currently, their values are...' + f'{yamlfile} / {experiment} / {platform} / {target}', ValueError) e = experiment p = platform t = target diff --git a/fre/yamltools/combine_yamls_script.py b/fre/yamltools/combine_yamls_script.py index 3b4af30a8..cf6d755ac 100755 --- a/fre/yamltools/combine_yamls_script.py +++ b/fre/yamltools/combine_yamls_script.py @@ -33,6 +33,7 @@ #import logging #from typing import Optional +from fre import log_and_raise from fre.yamltools.info_parsers import cmor_info_parser as cmip from fre.yamltools.info_parsers import compile_info_parser as cip from fre.yamltools.info_parsers import pp_info_parser as ppip @@ -85,7 +86,7 @@ def get_combined_cmoryaml( yamlfile: Union[str, Path], #fre_logger.debug('...CmorYaml =\n %s...', pformat(CmorYaml)) #assert False # good. except Exception as exc: - raise ValueError("CMORYaml.combine_model failed") from exc + log_and_raise("CMORYaml.combine_model failed", ValueError, exc=exc) # Merge cmor experiment yamls into combined file, calls experiment_check @@ -97,7 +98,7 @@ def get_combined_cmoryaml( yamlfile: Union[str, Path], #fre_logger.debug('... yaml_content = ...\n %s', pformat(yaml_content)) #assert False # good. 
except Exception as exc: - raise Exception(f"CmorYaml.combine_model failed for some reason.\n exc =\n {exc}") from exc + log_and_raise(f"CmorYaml.combine_model failed for some reason.\n exc =\n {exc}", Exception, exc=exc) # settings.yaml is deprecated for the cmor path — cmor yamls are now self-contained. @@ -113,7 +114,7 @@ def get_combined_cmoryaml( yamlfile: Union[str, Path], #fre_logger.debug('... comb_cmor_updated_list = ...\n %s', pformat(comb_cmor_updated_list)) ##assert False # good. except Exception as exc: - raise Exception(f"CmorYaml.combine_experiment failed for some reason.\n exc =\n {exc}") from exc + log_and_raise(f"CmorYaml.combine_experiment failed for some reason.\n exc =\n {exc}", Exception, exc=exc) try: fre_logger.info('\ncalling CmorYaml.merge_cmor_yaml(), for full_cmor_yaml.\n' @@ -123,7 +124,7 @@ def get_combined_cmoryaml( yamlfile: Union[str, Path], #fre_logger.debug('... full_cmor_yaml = ...\n %s', pformat(full_cmor_yaml)) ##assert False # good. except Exception as exc: - raise Exception(f"CmorYaml.merge_cmor_yaml failed for some reason.\n exc =\n {exc}") from exc + log_and_raise(f"CmorYaml.merge_cmor_yaml failed for some reason.\n exc =\n {exc}", Exception, exc=exc) cleaned_yaml = CmorYaml.clean_yaml(full_cmor_yaml) fre_logger.info("Combined cmor-yaml information cleaned+saved as dictionary") @@ -211,6 +212,6 @@ def consolidate_yamls(yamlfile:str, experiment:str, platform:str, # yml_dict = get_combined_cmoryaml( CmorYaml, output ) fre_logger.info('... done attempting to combine cmor yaml info') else: - raise ValueError("'use' value is not valid; must be one of: 'compile', 'pp', or 'cmor'") + log_and_raise("'use' value is not valid; must be one of: 'compile', 'pp', or 'cmor'", ValueError) return yml_dict diff --git a/fre/yamltools/helpers.py b/fre/yamltools/helpers.py index 858458ec2..934f80705 100644 --- a/fre/yamltools/helpers.py +++ b/fre/yamltools/helpers.py @@ -14,6 +14,7 @@ from . 
import * +from fre import log_and_raise fre_logger = logging.getLogger(__name__) @@ -81,7 +82,7 @@ def experiment_check(mainyaml_dir, experiment, loaded_yaml): exp_list.append(i.get("name")) if experiment not in exp_list: - raise NameError(f"{experiment} is not in the list of experiments") + log_and_raise(f"{experiment} is not in the list of experiments", NameError) # Extract yaml path for exp. provided # if experiment matches name in list of experiments in yaml, extract file path @@ -95,12 +96,12 @@ def experiment_check(mainyaml_dir, experiment, loaded_yaml): analysisyaml=i.get("analysis") if expyaml is None: - raise ValueError("No experiment yaml path given!") + log_and_raise("No experiment yaml path given!", ValueError) for e in expyaml: ey = Path( os.path.join(mainyaml_dir, e) ) if not ey.exists(): - raise ValueError(f"Experiment yaml path given ({e}) does not exist.") + log_and_raise(f"Experiment yaml path given ({e}) does not exist.", ValueError) ey_path.append(ey) # Currently, if there are no analysis scripts defined, set None @@ -111,7 +112,7 @@ def experiment_check(mainyaml_dir, experiment, loaded_yaml): # prepend the directory containing the yaml ay = Path(os.path.join(mainyaml_dir,a)) if not ay.exists(): - raise ValueError("Incorrect analysis yaml path given; does not exist.") + log_and_raise("Incorrect analysis yaml path given; does not exist.", ValueError) ay_path.append(ay) return (ey_path, ay_path) @@ -166,6 +167,6 @@ def validate_yaml(yaml, schema_path): fre_logger.info(" YAML dictionary VALID.\n") return True except: - raise ValueError(" YAML dictionary NOT VALID.\n") + log_and_raise(" YAML dictionary NOT VALID.\n", ValueError) finally: fre_logger.setLevel(former_log_level) diff --git a/fre/yamltools/info_parsers/analysis_info_parser.py b/fre/yamltools/info_parsers/analysis_info_parser.py index ac24fe8db..ed6787a4d 100644 --- a/fre/yamltools/info_parsers/analysis_info_parser.py +++ b/fre/yamltools/info_parsers/analysis_info_parser.py @@ -3,6 +3,7 
@@ ''' import os import logging +from fre import log_and_raise #import pprint from fre.yamltools.helpers import experiment_check, clean_yaml @@ -217,19 +218,19 @@ def combine(self): try: yaml_content_str = self.combine_model() except Exception as exc: - raise ValueError("ERR: Could not merge model yaml config with name, platform, and target.") from exc + log_and_raise("ERR: Could not merge model yaml config with name, platform, and target.", ValueError, exc=exc) # Merge settings into combined file try: yaml_content_str = self.combine_settings(yaml_content_str) except Exception as exc: - raise ValueError("ERR: Could not merge setting config with model config.") from exc + log_and_raise("ERR: Could not merge setting config with model config.", ValueError, exc=exc) # Merge analysis yamls, if defined, into combined file try: comb_analysis_updated_list = self.combine_yamls(yaml_content_str) except Exception as exc: - raise ValueError("ERR: Could not merge analysis yaml config with model and setting config") from exc + log_and_raise("ERR: Could not merge analysis yaml config with model and setting config", ValueError, exc=exc) # Merge model/analysis yamls if more than 1 is defined # (without overwriting the yaml) @@ -237,11 +238,11 @@ def combine(self): full_combined = self.merge_multiple_yamls(comb_analysis_updated_list, yaml_content_str) except Exception as exc: - raise ValueError("ERR: Could not merge multiple analysis yaml configs together.") from exc + log_and_raise("ERR: Could not merge multiple analysis yaml configs together.", ValueError, exc=exc) try: cleaned_yaml = clean_yaml(full_combined) except Exception as exc: - raise ValueError("The final YAML could not cleaned.") from exc + log_and_raise("The final YAML could not cleaned.", ValueError, exc=exc) return cleaned_yaml diff --git a/fre/yamltools/info_parsers/cmor_info_parser.py b/fre/yamltools/info_parsers/cmor_info_parser.py index 77fa77cbc..115e9f7f5 100644 --- 
a/fre/yamltools/info_parsers/cmor_info_parser.py +++ b/fre/yamltools/info_parsers/cmor_info_parser.py @@ -37,6 +37,7 @@ import logging fre_logger = logging.getLogger(__name__) +from fre import log_and_raise # this boots yaml with !join- see __init__ from . import * @@ -65,7 +66,7 @@ def experiment_check( mainyaml_dir: Union[str, Path], exp_list.append(i.get("name")) if experiment not in exp_list: - raise NameError(f"{experiment} is not in the list of experiments") + log_and_raise(f"{experiment} is not in the list of experiments", NameError) # Extract yaml path for exp. provided # if experiment matches name in list of experiments in yaml, extract file path @@ -84,12 +85,12 @@ def experiment_check( mainyaml_dir: Union[str, Path], else: grid_yaml_path=os.path.join(mainyaml_dir, gridyaml) if not Path(grid_yaml_path).exists(): - raise FileNotFoundError('%s not found!!!', grid_yaml_path) + log_and_raise(f'{grid_yaml_path} not found!!!', FileNotFoundError) ppyamls=i.get('pp') fre_logger.info(f'ppyamls is going to look like ppyamls=\n{ppyamls}') if ppyamls is None: - raise ValueError(f"no ppyaml paths found under experiment = {experiment}") + log_and_raise(f"no ppyaml paths found under experiment = {experiment}", ValueError) #ppsettingsyaml=None #for ppyaml in ppyamls: @@ -111,17 +112,17 @@ def experiment_check( mainyaml_dir: Union[str, Path], cmoryaml=i.get("cmor")[0] if cmoryaml is None: - raise ValueError("No experiment yaml path given!") + log_and_raise("No experiment yaml path given!", ValueError) fre_logger.info(f'cmoryaml={cmoryaml} found- now checking for existence.') if not Path(os.path.join(mainyaml_dir, cmoryaml)).exists(): - raise FileNotFoundError(f'cmoryaml={cmoryaml} does not exist!') + log_and_raise(f'cmoryaml={cmoryaml} does not exist!', FileNotFoundError) cmor_yaml_path = Path( os.path.join(mainyaml_dir, cmoryaml) ) break if cmor_yaml_path is None: - raise ValueError('... something wrong... cmor_yaml_path is None... 
it should not be none!') + log_and_raise('... something wrong... cmor_yaml_path is None... it should not be none!', ValueError) fre_logger.info(f'cmor_info_parser\'s experiment_check about to return cmor_yaml_path!') return cmor_yaml_path, settings_yaml_path, grid_yaml_path @@ -271,7 +272,7 @@ def combine_experiment( self, fre_logger.info(f'cmory_path = {cmory_path}') if cmory_path is None: - raise ValueError('cmory_path is none!') + log_and_raise('cmory_path is none!', ValueError) #fre_logger.info(f'ppsettingsy_path = {ppsettingsy_path}') #if ppsettingsy_path is None: @@ -324,7 +325,7 @@ def merge_cmor_yaml( self, :rtype: dict """ if cmor_list is None: - raise ValueError('cmor_list is none and should not be!!!') + log_and_raise('cmor_list is none and should not be!!!', ValueError) #fre_logger.debug("loaded_yaml =\n %s", pformat(loaded_yaml)) #fre_logger.debug("cmor_list =\n %s", pformat(cmor_list)) diff --git a/fre/yamltools/info_parsers/compile_info_parser.py b/fre/yamltools/info_parsers/compile_info_parser.py index 4cca5395d..a0ace58f9 100644 --- a/fre/yamltools/info_parsers/compile_info_parser.py +++ b/fre/yamltools/info_parsers/compile_info_parser.py @@ -3,6 +3,7 @@ ''' import os import logging +from fre import log_and_raise # this boots yaml with !join- see __init__ #from fre.yamltools import * from fre.yamltools.helpers import clean_yaml @@ -31,7 +32,7 @@ def get_compile_paths(full_path, yaml_content): for key,value in yml.items(): if key == "build": if (value.get("platformYaml") or value.get("compileYaml")) is None: - raise ValueError("Compile or platform yaml not defined") + log_and_raise("Compile or platform yaml not defined", ValueError) py_path = os.path.join(full_path,value.get("platformYaml")) cy_path = os.path.join(full_path,value.get("compileYaml")) @@ -164,24 +165,24 @@ def combine(self): try: yaml_content=self.combine_model() except Exception as exc: - raise ValueError("ERR: Could not merge model yaml config with name, platform, and target.") from 
exc + log_and_raise("ERR: Could not merge model yaml config with name, platform, and target.", ValueError, exc=exc) # Merge compile into combined file to create updated yaml_content/yaml try: yaml_content = self.combine_compile(yaml_content) except Exception as exc: - raise ValueError("ERR: Could not merge compile yaml config with model config.") from exc + log_and_raise("ERR: Could not merge compile yaml config with model config.", ValueError, exc=exc) # Merge platforms.yaml into combined file try: full_combined = self.combine_platforms(yaml_content) except Exception as exc: - raise ValueError("ERR: Could not merge platform yaml config with model and compile configs.") from exc + log_and_raise("ERR: Could not merge platform yaml config with model and compile configs.", ValueError, exc=exc) # Clean the yaml try: cleaned_yaml = clean_yaml(full_combined) except Exception as exc: - raise ValueError("The final YAML could not cleaned.") from exc + log_and_raise("The final YAML could not cleaned.", ValueError, exc=exc) return cleaned_yaml diff --git a/fre/yamltools/info_parsers/pp_info_parser.py b/fre/yamltools/info_parsers/pp_info_parser.py index aeb228ecd..0075e8d63 100644 --- a/fre/yamltools/info_parsers/pp_info_parser.py +++ b/fre/yamltools/info_parsers/pp_info_parser.py @@ -5,6 +5,7 @@ import logging import pprint +from fre import log_and_raise from fre.yamltools.helpers import experiment_check, clean_yaml from fre.yamltools.abstract_classes import MergePPANYamls #from fre.yamltools.val_yml_structures import ModelYmlStructure @@ -131,7 +132,7 @@ def combine_yamls(self, yaml_content_str: str): ## COMBINE EXPERIMENT YAML INFO # If only 1 pp yaml defined, combine with model yaml if ey_path is None: - raise ValueError('if ey_path is None, then pp_yamls will be an empty list. Exit!') + log_and_raise('if ey_path is None, then pp_yamls will be an empty list. 
Exit!', ValueError) elif len(ey_path) == 1: #expyaml_path = os.path.join(mainyaml_dir, i) @@ -239,18 +240,18 @@ def combine(self): # Merge model into combined file yaml_content_str = self.combine_model() except Exception as exc: - raise ValueError("ERR: Could not merge model yaml config with name, platform, and target.") from exc + log_and_raise("ERR: Could not merge model yaml config with name, platform, and target.", ValueError, exc=exc) try: # Merge model into combined file yaml_content_str = self.combine_settings(yaml_content_str) except Exception as exc: - raise ValueError("ERR: Could not merge setting config with model config.") from exc + log_and_raise("ERR: Could not merge setting config with model config.", ValueError, exc=exc) try: # Merge pp yamls, if defined, into combined file comb_pp_updated_list = self.combine_yamls(yaml_content_str) except Exception as exc: - raise ValueError("ERR: Could not merge pp yaml config with model and setting config.") from exc + log_and_raise("ERR: Could not merge pp yaml config with model and setting config.", ValueError, exc=exc) try: # Merge model/pp yamls if more than 1 is defined @@ -258,11 +259,11 @@ def combine(self): full_combined = self.merge_multiple_yamls(comb_pp_updated_list, yaml_content_str) except Exception as exc: - raise ValueError("ERR: Could not merge multiple pp yaml configs together.") from exc + log_and_raise("ERR: Could not merge multiple pp yaml configs together.", ValueError, exc=exc) try: cleaned_yaml = clean_yaml(full_combined) except Exception as exc: - raise ValueError("The final YAML could not cleaned.") from exc + log_and_raise("The final YAML could not cleaned.", ValueError, exc=exc) return cleaned_yaml From 5cf50b11df815db445aa2bf31a8f8d20782de42b Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 3 Apr 2026 19:42:34 +0000 Subject: [PATCH 07/10] Add -> NoReturn annotation to log_and_raise to fix pylint 
possibly-used-before-assignment errors Agent-Logs-Url: https://github.com/NOAA-GFDL/fre-cli/sessions/6e92661a-f061-4b23-a675-7ddeb69b0a21 Co-authored-by: ilaflott <6273252+ilaflott@users.noreply.github.com> --- fre/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/fre/__init__.py b/fre/__init__.py index 9b568a244..e24d08175 100644 --- a/fre/__init__.py +++ b/fre/__init__.py @@ -4,6 +4,7 @@ import logging import os +from typing import NoReturn version = os.getenv("GIT_DESCRIBE_TAG", "2026.01.alpha2") __version__ = version @@ -16,7 +17,7 @@ encoding = 'utf-8' ) -def log_and_raise(msg, exc_type=ValueError, exc=None): +def log_and_raise(msg, exc_type=ValueError, exc=None) -> NoReturn: """ Log an error message via fre_logger and raise an exception with the same message. Avoids the need to duplicate error text in both fre_logger.error() and raise calls. From 202b5b39a97c7fa8ac8da359944ea60398d56fef Mon Sep 17 00:00:00 2001 From: "Ian L." <6273252+ilaflott@users.noreply.github.com> Date: Mon, 6 Apr 2026 15:51:52 -0400 Subject: [PATCH 08/10] Improve error logging in log_and_raise function Replace fre_logger.error with fre_logger.exception for better error logging. --- fre/__init__.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/fre/__init__.py b/fre/__init__.py index e24d08175..edd85aca0 100644 --- a/fre/__init__.py +++ b/fre/__init__.py @@ -27,6 +27,9 @@ def log_and_raise(msg, exc_type=ValueError, exc=None) -> NoReturn: :param exc_type: Exception class to raise. Defaults to ValueError. :type exc_type: type :param exc: Optional original exception to chain from (uses ``raise ... from exc``). + When provided, logging uses ``fre_logger.exception()`` to capture the + traceback. Should be called from within an ``except`` block for full + traceback detail. :type exc: Exception, optional :raises exc_type: Always raised with the given message. 
@@ -42,7 +45,8 @@ def log_and_raise(msg, exc_type=ValueError, exc=None) -> NoReturn: except KeyError as e: log_and_raise("update failed", KeyError, exc=e) """ - fre_logger.error(msg, stacklevel=2) if exc is not None: + fre_logger.exception(msg, stacklevel=2) raise exc_type(msg) from exc + fre_logger.error(msg, stacklevel=2) raise exc_type(msg) From 95c375dc1627d14f806b350830b3d55f1a9b9591 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 6 Apr 2026 20:07:14 +0000 Subject: [PATCH 09/10] Align log_and_raise with Python logging best practices; add log-file traceback tests Per Python logging best practices (PEP 282, logging HOWTO): - Use exc_info=True (not logger.exception) for caught exception tracebacks - Use stack_info=True to always include call-site stack trace in log output - Both ensure Traceback info reaches log files set up via fre -l New tests verify that when a FileHandler is attached (simulating fre -l ./logger_output.log): - stack trace appears in log file for simple raises (stack_info=True) - caught exception traceback appears in log file for chained raises (exc_info=True) Agent-Logs-Url: https://github.com/NOAA-GFDL/fre-cli/sessions/73b3875d-4130-4999-9488-0dc773ca1c45 Co-authored-by: ilaflott <6273252+ilaflott@users.noreply.github.com> --- fre/__init__.py | 21 +++++-- fre/tests/test_fre_log_and_raise.py | 89 +++++++++++++++++++++++++++++ 2 files changed, 105 insertions(+), 5 deletions(-) diff --git a/fre/__init__.py b/fre/__init__.py index edd85aca0..2c4784d7e 100644 --- a/fre/__init__.py +++ b/fre/__init__.py @@ -22,14 +22,25 @@ def log_and_raise(msg, exc_type=ValueError, exc=None) -> NoReturn: Log an error message via fre_logger and raise an exception with the same message. Avoids the need to duplicate error text in both fre_logger.error() and raise calls. 
+ Per Python logging best practices (see :pep:`282` and the + `logging HOWTO `_): + + * ``exc_info=True`` is passed when *exc* is given so the caught exception's + traceback is written to every configured handler (including any log file + set up via ``fre -l``). + * ``stack_info=True`` is always passed so the call-site stack trace appears + in every handler, making it easy to locate the origin of the error in a + log file. + * ``stacklevel=2`` attributes the log record to the **caller** of + ``log_and_raise``, not this helper itself. + :param msg: Error message to log and include in the exception. :type msg: str :param exc_type: Exception class to raise. Defaults to ValueError. :type exc_type: type :param exc: Optional original exception to chain from (uses ``raise ... from exc``). - When provided, logging uses ``fre_logger.exception()`` to capture the - traceback. Should be called from within an ``except`` block for full - traceback detail. + When provided, ``exc_info=True`` is set so the caught exception's + traceback is included in the log output. :type exc: Exception, optional :raises exc_type: Always raised with the given message. 
@@ -46,7 +57,7 @@ def log_and_raise(msg, exc_type=ValueError, exc=None) -> NoReturn: log_and_raise("update failed", KeyError, exc=e) """ if exc is not None: - fre_logger.exception(msg, stacklevel=2) + fre_logger.error(msg, exc_info=True, stack_info=True, stacklevel=2) raise exc_type(msg) from exc - fre_logger.error(msg, stacklevel=2) + fre_logger.error(msg, stack_info=True, stacklevel=2) raise exc_type(msg) diff --git a/fre/tests/test_fre_log_and_raise.py b/fre/tests/test_fre_log_and_raise.py index a08728bc1..aaebadb62 100644 --- a/fre/tests/test_fre_log_and_raise.py +++ b/fre/tests/test_fre_log_and_raise.py @@ -7,9 +7,12 @@ - supports exception chaining via the exc parameter - defaults to ValueError when no exc_type is given - reports the correct caller in log output (not log_and_raise itself) +- writes stack trace info to a log file (simulating ``fre -l ./log.log``) +- writes exception traceback to a log file when exc is provided """ import logging +from pathlib import Path import pytest @@ -54,3 +57,89 @@ def test_log_and_raise_caller_in_log(caplog): break else: pytest.fail("expected log record not found") + + +def test_log_and_raise_stack_trace_in_log_file(tmp_path): + """ + Simulates ``fre -l ./logger_output.log`` by attaching a FileHandler to + the ``fre`` logger, then verifying that log_and_raise writes stack trace + information to the log file via ``stack_info=True``. + + Per Python logging best practices (docs.python.org/3/howto/logging.html), + ``stack_info=True`` causes the current call stack to appear in the log + output, making it straightforward to locate the origin of an error. 
+ """ + from fre import FORMAT + + log_file = tmp_path / "logger_output.log" + + # Mirror fre.py: attach a FileHandler to the root 'fre' logger + base_logger = logging.getLogger("fre") + handler = logging.FileHandler(str(log_file), mode="w", encoding="utf-8") + handler.setFormatter(logging.Formatter(fmt=FORMAT)) + handler.setLevel(logging.ERROR) + base_logger.addHandler(handler) + + try: + with pytest.raises(ValueError, match="stack trace test"): + log_and_raise("stack trace test") + finally: + base_logger.removeHandler(handler) + handler.close() + + log_content = log_file.read_text(encoding="utf-8") + + # The error message itself must appear + assert "stack trace test" in log_content + + # stack_info=True produces a "Stack (most recent call last):" block + assert "Stack (most recent call last):" in log_content + + # The call stack should reference THIS test function + assert "test_log_and_raise_stack_trace_in_log_file" in log_content + + +def test_log_and_raise_traceback_in_log_file(tmp_path): + """ + Simulates ``fre -l ./logger_output.log`` and verifies that when + log_and_raise is called with ``exc`` (inside an except block), the + caught exception's traceback appears in the log file via + ``exc_info=True``. + + This matches the Python best practice of using ``exc_info=True`` to + record the full traceback of a caught exception + (see docs.python.org/3/library/logging.html#logging.Logger.error). 
+ """ + from fre import FORMAT + + log_file = tmp_path / "logger_output.log" + + base_logger = logging.getLogger("fre") + handler = logging.FileHandler(str(log_file), mode="w", encoding="utf-8") + handler.setFormatter(logging.Formatter(fmt=FORMAT)) + handler.setLevel(logging.ERROR) + base_logger.addHandler(handler) + + try: + try: + raise RuntimeError("original failure") + except RuntimeError as caught: + with pytest.raises(OSError, match="wrapped failure"): + log_and_raise("wrapped failure", OSError, exc=caught) + finally: + base_logger.removeHandler(handler) + handler.close() + + log_content = log_file.read_text(encoding="utf-8") + + # The error message itself must appear + assert "wrapped failure" in log_content + + # exc_info=True produces a "Traceback (most recent call last):" block + assert "Traceback (most recent call last):" in log_content + + # The original exception type and message must appear in the traceback + assert "RuntimeError: original failure" in log_content + + # stack_info=True also adds the call stack + assert "Stack (most recent call last):" in log_content From 3b994053ce870f0ee1b62212f6df6902f1f29405 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 6 Apr 2026 20:08:18 +0000 Subject: [PATCH 10/10] Remove unused Path import from test_fre_log_and_raise.py Agent-Logs-Url: https://github.com/NOAA-GFDL/fre-cli/sessions/73b3875d-4130-4999-9488-0dc773ca1c45 Co-authored-by: ilaflott <6273252+ilaflott@users.noreply.github.com> --- fre/tests/test_fre_log_and_raise.py | 1 - 1 file changed, 1 deletion(-) diff --git a/fre/tests/test_fre_log_and_raise.py b/fre/tests/test_fre_log_and_raise.py index aaebadb62..2cc907644 100644 --- a/fre/tests/test_fre_log_and_raise.py +++ b/fre/tests/test_fre_log_and_raise.py @@ -12,7 +12,6 @@ """ import logging -from pathlib import Path import pytest