diff --git a/torchtune/training/metric_logging.py b/torchtune/training/metric_logging.py
index c22bd06b0..19d3a0ef0 100644
--- a/torchtune/training/metric_logging.py
+++ b/torchtune/training/metric_logging.py
@@ -46,7 +46,7 @@ def save_config(config: DictConfig) -> Path:
         OmegaConf.save(config, output_config_fname)
         return output_config_fname
     except Exception as e:
-        log.warning(f"Error saving config to {output_config_fname}.\nError: \n{e}.")
+        log.warning(f"Error saving config.\nError: \n{e}.")


 class MetricLoggerInterface(Protocol):
@@ -421,7 +421,8 @@ def __init__(
             ) from e

         # Remove 'log_dir' from kwargs as it is not a valid argument for comet_ml.ExperimentConfig
-        del kwargs["log_dir"]
+        if "log_dir" in kwargs:
+            del kwargs["log_dir"]

         _, self.rank = get_world_size_and_rank()