diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 537fa792..63997758 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -23,7 +23,7 @@ jobs: shell: bash -l {0} strategy: matrix: - python-version: ["3.9", "3.10"] + python-version: ["3.11", "3.12", "3.13"] mpi: ["hpc", "nompi", "mpich", "openmpi"] fail-fast: false steps: diff --git a/docs/releasing/testing/deploying-on-hpcs.md b/docs/releasing/testing/deploying-on-hpcs.md index 02562461..bf8b9a14 100644 --- a/docs/releasing/testing/deploying-on-hpcs.md +++ b/docs/releasing/testing/deploying-on-hpcs.md @@ -142,12 +142,79 @@ used during deployment. * Permissions have been updated successfully (read only for everyone except the E3SM-Unified maintainer) -4. **Manually test** tools in the installed environment +4. **Verify compute-node activation** + + The activation scripts are designed to load a no-MPI environment on login + nodes and the MPI-enabled environment on compute nodes (detected via + scheduler variables like `$SLURM_JOB_ID` or `$COBALT_JOBID`). Before manual + testing, confirm that sourcing the script on a compute node loads the MPI + environment as expected. + + Steps: + + * Start an interactive job on a compute node, for example: + + - Slurm: + + ```bash + salloc -N 1 -t 10:00 + ``` + + - Cobalt: + + ```bash + qsub -I -n 1 -t 10 + ``` + + - PBS (example): + + ```bash + qsub -I -l select=1:ncpus=1:mpiprocs=1,walltime=00:10:00 + ``` + + * On the compute node, source the activation script: + + - Bash/zsh: + + ```bash + source test_e3sm_unified__.sh + ``` + + - csh/tcsh: + + ```bash + source test_e3sm_unified__.csh + ``` + + For release builds, use the corresponding `load_e3sm_unified__.*` + or `load_latest_e3sm_unified_.*` script names. + + * Verify that the MPI environment is active (not the no-MPI one): + + ```bash + echo "$E3SMU_MPI" # should NOT be "NOMPI" on a compute node + which python # should point to the E3SM-Unified conda env + python -c "import mpi4py, xarray; print('mpi4py:', mpi4py.__version__)" + ``` + + Optional quick MPI sanity check (if mpirun/srun is available on the node): + + ```bash + mpirun -n 2 python -c "from mpi4py import MPI; print(MPI.COMM_WORLD.Get_size())" + # or, for Slurm + srun -n 2 python -c "from mpi4py import MPI; print(MPI.COMM_WORLD.Get_size())" + ``` + + If the script loads the no-MPI environment (`E3SMU_MPI=NOMPI`) on a compute + node, check that the scheduler environment variables are present on compute + nodes for this machine and update the activation templates if needed. + +5. **Manually test** tools in the installed environment * Load via: `source test_e3sm_unified__.sh` * Run tools like `zppy`, `e3sm_diags`, `mpas_analysis` -5. **Deploy more broadly** once core systems pass testing +6. 
**Deploy more broadly** once core systems pass testing --- diff --git a/e3sm_supported_machines/bootstrap.py b/e3sm_supported_machines/bootstrap.py index a3acec4e..4d5c4a31 100755 --- a/e3sm_supported_machines/bootstrap.py +++ b/e3sm_supported_machines/bootstrap.py @@ -3,17 +3,22 @@ import os import subprocess import shutil +from pathlib import Path from jinja2 import Template from importlib import resources from configparser import ConfigParser from mache import discover_machine -from mache.spack import make_spack_env, get_spack_script, \ - get_modules_env_vars_and_mpi_compilers +from mache.machines.pre_conda import load_pre_conda_script +from mache.spack import ( + make_spack_env, + get_spack_script, + get_modules_env_vars_and_mpi_compilers, +) from mache.permissions import update_permissions from shared import ( check_call, - get_conda_base, + get_base, get_rc_dev_labels, install_miniforge3, parse_args, @@ -75,7 +80,7 @@ def get_env_setup(args, config, machine): if machine is not None and compiler is not None: conda_mpi = 'hpc' - env_suffix = f'_{machine}' + env_suffix = '_compute' else: conda_mpi = mpi env_suffix = '_login' @@ -110,12 +115,6 @@ def build_env(is_test, recreate, compiler, mpi, conda_mpi, version, os.chdir(build_dir) env_name = f'e3sm_unified_{version}{env_suffix}' - - # add the compiler and MPI library to the spack env name - spack_env = f'{env_name}_{compiler}_{mpi}' - # spack doesn't like dots - spack_env = spack_env.replace('.', '_') - env_path = os.path.join(conda_base, 'envs', env_name) if conda_mpi in ['nompi', 'hpc']: @@ -137,6 +136,9 @@ def build_env(is_test, recreate, compiler, mpi, conda_mpi, version, if local_conda_build is not None: channels = f'{channels} -c {local_conda_build}' + if 'rc' in version: + channels = f'{channels} -c conda-forge/label/e3sm_unified_dev' + meta_yaml_path = os.path.join( os.path.dirname(__file__), "..", @@ -183,7 +185,7 @@ def build_env(is_test, recreate, compiler, mpi, conda_mpi, version, else: print(f'{env_name} already exists') - return env_path, env_name, activate_env, channels, spack_env + return env_path, env_name, activate_env, channels def install_mache_from_branch(activate_env, fork, branch): @@ -196,7 +198,7 @@ def install_mache_from_branch(activate_env, fork, branch): def build_sys_ilamb_esmpy(config, machine, compiler, mpi, template_path, - activate_env, channels, spack_base, spack_env): + activate_env, channels, spack_base): mpi4py_version = config.get('e3sm_unified', 'mpi4py') ilamb_version = config.get('e3sm_unified', 'ilamb') @@ -224,7 +226,7 @@ def build_sys_ilamb_esmpy(config, machine, compiler, mpi, template_path, modules = f'{activate_env_lines}\n{modules}' spack_view = f'{spack_base}/var/spack/environments/' \ - f'{spack_env}/.spack-env/view' + f'e3sm_spack_env/.spack-env/view' script = template.render( mpicc=mpicc, modules=modules, template_path=template_path, mpi4py_version=mpi4py_version, build_mpi4py=str(build_mpi4py), @@ -248,10 +250,10 @@ def build_sys_ilamb_esmpy(config, machine, compiler, mpi, template_path, return esmf_mk -def build_spack_env(config, machine, compiler, mpi, spack_env, tmpdir): +def build_spack_env(config, machine, compiler, mpi, version, tmpdir): - base_path = config.get('e3sm_unified', 'base_path') - spack_base = f'{base_path}/spack/{spack_env}' + base_path = get_base(config, version) + spack_base_path = f'{base_path}/{machine}/spack/{compiler}_{mpi}' if config.has_option('e3sm_unified', 'use_e3sm_hdf5_netcdf'): use_e3sm_hdf5_netcdf = config.getboolean('e3sm_unified', @@ -274,20 
+276,37 @@ def build_spack_env(config, machine, compiler, mpi, spack_env, tmpdir): continue value = section[option] if value != '': - specs.append(f'"{value}"') + specs.append(f'{value}') - make_spack_env(spack_path=spack_base, env_name=spack_env, + make_spack_env(spack_path=spack_base_path, env_name='e3sm_spack_env', spack_specs=specs, compiler=compiler, mpi=mpi, machine=machine, tmpdir=tmpdir, include_e3sm_lapack=True, include_e3sm_hdf5_netcdf=use_e3sm_hdf5_netcdf, spack_mirror=spack_mirror) - return spack_base + return spack_base_path + + +def write_load_e3sm_unified( + template_path, + activ_path, + conda_base, + is_test, + version, + activ_suffix, + env_name, + env_nompi, + sys_info, + ext, + machine, + spack_script, +): + pre_conda_script = load_pre_conda_script(machine=machine, ext=ext) -def write_load_e3sm_unified(template_path, activ_path, conda_base, is_test, - version, activ_suffix, env_name, env_nompi, - sys_info, ext, machine, spack_script): + print(f'Pre-conda script for {machine} ({ext}):') + print(pre_conda_script) + print('---') try: os.makedirs(activ_path) @@ -321,14 +340,17 @@ def write_load_e3sm_unified(template_path, activ_path, conda_base, is_test, else: env_type = 'SYSTEM' - script = template.render(conda_base=conda_base, env_name=env_name, - env_type=env_type, - script_filename=script_filename, - env_nompi=env_nompi, - spack='\n '.join(spack_script.split('\n')), - modules='\n '.join(sys_info['modules']), - env_vars=env_vars, - machine=machine) + script = template.render( + pre_conda_script=pre_conda_script, + conda_base=conda_base, + env_name=env_name, + env_type=env_type, + script_filename=script_filename, + env_nompi=env_nompi, + spack='\n '.join(spack_script.split('\n')), + modules='\n '.join(sys_info['modules']), + env_vars=env_vars, + machine=machine) # strip out redundant blank lines lines = list() @@ -375,10 +397,6 @@ def check_env(script_filename, env_name, conda_mpi, machine): command = f'{activate} && python -c "import {import_name}"' test_command(command, os.environ, import_name) - # an extra check because the lack of ESMFRegrid is a problem for e3sm_diags - command = f'{activate} && python -c "from regrid2 import ESMFRegrid"' - test_command(command, os.environ, 'cdms2') - for command in commands: package = command[0] command_str = ' '.join(command) @@ -418,7 +436,8 @@ def main(): else: is_test = not config.getboolean('e3sm_unified', 'release') - conda_base = get_conda_base(args.conda_base, config, shared=True) + base_path = get_base(config, version) + conda_base = os.path.join(base_path, machine, 'conda') conda_base = os.path.abspath(conda_base) source_activation_scripts = \ @@ -439,7 +458,7 @@ def main(): nompi_suffix = '_login' # first, make environment for login nodes. 
We're using no-MPI from # conda-forge for now - env_path, env_nompi, activate_env, _, _ = build_env( + conda_env_path, env_nompi, activate_env, _ = build_env( is_test, recreate, nompi_compiler, mpi, 'nompi', version, python, conda_base, nompi_suffix, nompi_suffix, activate_base, args.local_conda_build, config) @@ -450,11 +469,23 @@ def main(): branch=args.mache_branch) if not is_test: - # make a symlink to the environment - link = os.path.join(conda_base, 'envs', 'e3sm_unified_latest') - check_call(f'ln -sfn {env_path} {link}') - - env_path, env_name, activate_env, channels, spack_env = build_env( + top_dir = Path(config.get('e3sm_unified', 'base_path')) + nco_dir = top_dir / "e3smu_latest_for_nco" + nco_dir.mkdir(exist_ok=True) + + # copy readme into directory for nco symlinks + readme = Path(template_path) / "e3sm_unified_nco.readme" + shutil.copy(readme, nco_dir / "README") + + link = nco_dir / machine + check_call(f'ln -sfn {conda_env_path} {link}') + + ( + conda_env_path, + conda_env_name, + activate_env, + channels + ) = build_env( is_test, recreate, compiler, mpi, conda_mpi, version, python, conda_base, activ_suffix, env_suffix, activate_base, args.local_conda_build, config) @@ -463,27 +493,39 @@ def main(): env_vars=['export HDF5_USE_FILE_LOCKING=FALSE']) if compiler is not None: - spack_base = build_spack_env(config, machine, compiler, mpi, spack_env, - args.tmpdir) + spack_base = build_spack_env( + config, machine, compiler, mpi, version, args.tmpdir + ) esmf_mk = build_sys_ilamb_esmpy(config, machine, compiler, mpi, template_path, activate_env, channels, - spack_base, spack_env) + spack_base) sys_info['env_vars'].append(esmf_mk) else: spack_base = None + # start restricted permissions at machine level + paths_to_update = [os.path.join(base_path, machine)] test_script_filename = None for ext in ['sh', 'csh']: if compiler is not None: spack_script = get_spack_script( - spack_path=spack_base, env_name=spack_env, compiler=compiler, - mpi=mpi, shell=ext, machine=machine) + spack_path=spack_base, env_name="e3sm_spack_env", + compiler=compiler, mpi=mpi, shell=ext, machine=machine) else: spack_script = '' script_filename = write_load_e3sm_unified( - template_path, activ_path, conda_base, is_test, version, - activ_suffix, env_name, env_nompi, sys_info, ext, machine, + template_path, + activ_path, + conda_base, + is_test, + version, + activ_suffix, + conda_env_name, + env_nompi, + sys_info, + ext, + machine, spack_script) if ext == 'sh': test_script_filename = script_filename @@ -493,17 +535,23 @@ def main(): link = os.path.join(activ_path, link) check_call(f'ln -sfn {script_filename} {link}') - check_env(test_script_filename, env_name, conda_mpi, machine) + # update files before directories, since they are quicker to do + paths_to_update.insert(0, script_filename) + + check_env(test_script_filename, conda_env_name, conda_mpi, machine) commands = f'{activate_base} && conda clean -y -p -t' check_call(commands) - paths = [activ_path, conda_base] - if spack_base is not None: - paths.append(spack_base) group = config.get('e3sm_unified', 'group') - update_permissions(paths, group, show_progress=True, - group_writable=False, other_readable=True) + update_permissions( + base_path, group, group_writable=True, + other_readable=True, recursive=False + ) + update_permissions( + paths_to_update, group, show_progress=True, + group_writable=False, other_readable=True, recursive=True + ) if __name__ == '__main__': diff --git a/e3sm_supported_machines/default.cfg b/e3sm_supported_machines/default.cfg index
1d7e8e43..e8c50f5f 100644 --- a/e3sm_supported_machines/default.cfg +++ b/e3sm_supported_machines/default.cfg @@ -12,7 +12,7 @@ recreate = False suffix = # the python version -python = 3.10 +python = 3.13 # the MPI version (nompi, mpich or openmpi) mpi = nompi @@ -22,10 +22,10 @@ ilamb = 2.7.2 # the version of mache to use during deployment (should match the version used # in the package itself) -mache = 1.28.0 +mache = 2.0.0rc4 # the version of mpi4py to build if using system compilers and MPI -mpi4py = 4.0.1 +mpi4py = 4.1.1 # the version of esmpy to build if using system MPI (must match esmf from # spack below if not None) @@ -39,13 +39,13 @@ xesmf = None # spack package specs [spack_specs] -esmf = esmf@8.8.0+mpi+netcdf~pnetcdf~external-parallelio -hdf5 = hdf5@1.14.3+cxx+fortran+hl+mpi+shared -moab = moab@5.5.1+mpi+hdf5+netcdf+pnetcdf+metis+parmetis+tempest -nco = nco@5.3.2+openmp -netcdf_c = netcdf-c@4.9.2+mpi~parallel-netcdf +esmf = esmf@8.9.0+mpi+netcdf~pnetcdf~external-parallelio +hdf5 = hdf5@1.14.6+cxx+fortran+hl+mpi+shared +moab = moab@5.6.0rc2+mpi+hdf5+netcdf+pnetcdf+metis+parmetis+tempest +nco = nco@5.3.6+openmp +netcdf_c = netcdf-c@4.9.3+mpi~parallel-netcdf netcdf_fortran = netcdf-fortran@4.5.4 parallel_netcdf = parallel-netcdf@1.12.3 # parallelio = parallelio@2.5.10+fortran+mpi~pnetcdf -tempestextremes = tempestextremes@2.2.3+mpi +tempestextremes = tempestextremes@2.4+mpi tempestremap = tempestremap@2.2.0 diff --git a/e3sm_supported_machines/deploy_e3sm_unified.py b/e3sm_supported_machines/deploy_e3sm_unified.py index 567c05e8..37b1d8ef 100755 --- a/e3sm_supported_machines/deploy_e3sm_unified.py +++ b/e3sm_supported_machines/deploy_e3sm_unified.py @@ -4,12 +4,12 @@ import os import sys +import warnings from configparser import ConfigParser from shared import ( check_call, - get_conda_base, install_miniforge3, parse_args, ) @@ -27,6 +27,25 @@ def get_config(config_file): return config +def get_conda_base(conda_base): + if conda_base is None: + if 'CONDA_EXE' in os.environ: + # if this is a test, assume we're the same base as the + # environment currently active + conda_exe = os.environ['CONDA_EXE'] + conda_base = os.path.abspath( + os.path.join(conda_exe, '..', '..')) + warnings.warn( + f'--conda path not supplied. 
Using conda installed at ' + f'{conda_base}') + else: + raise ValueError('No conda base provided with --conda and ' + 'none could be inferred.') + # handle "~" in the path + conda_base = os.path.abspath(os.path.expanduser(conda_base)) + return conda_base + + def bootstrap(activate_install_env, source_path, local_conda_build): print('Creating the e3sm_unified conda environment') @@ -85,7 +104,7 @@ def main(): config = get_config(args.config_file) - conda_base = get_conda_base(args.conda_base, config, shared=False) + conda_base = get_conda_base(args.conda_base) conda_base = os.path.abspath(conda_base) source_activation_scripts = \ diff --git a/e3sm_supported_machines/shared.py b/e3sm_supported_machines/shared.py index a0309ef4..262d67d4 100644 --- a/e3sm_supported_machines/shared.py +++ b/e3sm_supported_machines/shared.py @@ -3,7 +3,6 @@ import subprocess import os import platform -import warnings try: from urllib.request import urlopen, Request @@ -22,6 +21,7 @@ "nco": "nco_dev", "xcdat": "xcdat_dev", "zppy": "zppy_dev", + "zppy-interfaces": "zppy_interfaces_dev", "zstash": "zstash_dev", } @@ -127,29 +127,14 @@ def install_miniforge3(conda_base, activate_base): check_call(commands) -def get_conda_base(conda_base, config, shared): - if shared: - conda_base = os.path.join( - config.get('e3sm_unified', 'base_path'), 'base') - elif conda_base is None: - if config.has_option('e3sm_unified', 'base_path'): - conda_base = os.path.abspath(os.path.join( - config.get('e3sm_unified', 'base_path'), 'base')) - elif 'CONDA_EXE' in os.environ: - # if this is a test, assume we're the same base as the - # environment currently active - conda_exe = os.environ['CONDA_EXE'] - conda_base = os.path.abspath( - os.path.join(conda_exe, '..', '..')) - warnings.warn( - f'--conda path not supplied. Using conda installed at ' - f'{conda_base}') - else: - raise ValueError('No conda base provided with --conda and ' - 'none could be inferred.') - # handle "~" in the path - conda_base = os.path.abspath(os.path.expanduser(conda_base)) - return conda_base +def get_base(config, version): + """ + Get the base path for E3SM-Unified conda and spack installation + """ + base_path = config.get('e3sm_unified', 'base_path') + subdir = f'e3smu_{version}'.replace('.', '_') + base_path = os.path.join(base_path, subdir) + return base_path def get_rc_dev_labels(meta_yaml_path): diff --git a/e3sm_supported_machines/templates/build.template b/e3sm_supported_machines/templates/build.template index e8cc05b3..5e75a120 100644 --- a/e3sm_supported_machines/templates/build.template +++ b/e3sm_supported_machines/templates/build.template @@ -8,7 +8,11 @@ set -e if [ "{{ build_mpi4py }}" == "True" ]; then - MPICC="{{ mpicc }} -shared" python -m pip install --no-cache-dir "mpi4py=={{ mpi4py_version }}" + MPICC="{{ mpicc }} -shared" python -m pip install \ + --no-cache-dir \ + --no-binary=mpi4py \ + --no-build-isolation \ + "mpi4py=={{ mpi4py_version }}" fi @@ -44,4 +48,4 @@ if [ "{{ build_xesmf }}" == "True" ]; then python -m pip install --no-deps --no-build-isolation . cd ${build_dir} -fi \ No newline at end of file +fi diff --git a/e3sm_supported_machines/templates/e3sm_unified_nco.readme b/e3sm_supported_machines/templates/e3sm_unified_nco.readme new file mode 100644 index 00000000..d756d5eb --- /dev/null +++ b/e3sm_supported_machines/templates/e3sm_unified_nco.readme @@ -0,0 +1,3 @@ +# DO NOT USE + +Symlinks in this directory are only to be used progromatically by NCO. 
diff --git a/e3sm_supported_machines/templates/load_e3sm_unified.csh.template b/e3sm_supported_machines/templates/load_e3sm_unified.csh.template index 5b44476d..0e7b41ef 100644 --- a/e3sm_supported_machines/templates/load_e3sm_unified.csh.template +++ b/e3sm_supported_machines/templates/load_e3sm_unified.csh.template @@ -1,3 +1,5 @@ +{{ pre_conda_script }} + source {{ conda_base }}/etc/profile.d/conda.csh setenv E3SMU_SCRIPT "{{ script_filename }}" @@ -6,7 +8,7 @@ setenv E3SMU_MACHINE "{{ machine }}" # prevent any attempts to build E3SM with E3SM-Unified loaded setenv CIME_MODEL "ENVIRONMENT_RUNNING_E3SM_UNIFIED_USE_ANOTHER_TERMINAL" -if ( ! $?SLURM_JOB_ID && ! $?COBALT_JOBID ) then +if ( ! $?SLURM_JOB_ID && ! $?COBALT_JOBID && ! $?PBS_JOBID ) then # we seem to be on a login node, so load the no-MPI environment setenv E3SMU_MPI "NOMPI" diff --git a/e3sm_supported_machines/templates/load_e3sm_unified.sh.template b/e3sm_supported_machines/templates/load_e3sm_unified.sh.template index 5a12a35e..7c8249e2 100644 --- a/e3sm_supported_machines/templates/load_e3sm_unified.sh.template +++ b/e3sm_supported_machines/templates/load_e3sm_unified.sh.template @@ -1,3 +1,5 @@ +{{ pre_conda_script }} + source {{ conda_base }}/etc/profile.d/conda.sh export E3SMU_SCRIPT="{{ script_filename }}" @@ -6,7 +8,7 @@ export E3SMU_MACHINE="{{ machine }}" # prevent any attempts to build E3SM with E3SM-Unified loaded export CIME_MODEL="ENVIRONMENT_RUNNING_E3SM_UNIFIED_USE_ANOTHER_TERMINAL" -if [ -z ${SLURM_JOB_ID+x} ] && [ -z ${COBALT_JOBID+x} ] +if [ -z ${SLURM_JOB_ID+x} ] && [ -z ${COBALT_JOBID+x} ] && [ -z ${PBS_JOBID+x} ] then # we seem to be on a login node, so load the no-MPI environment diff --git a/recipes/e3sm-unified/build_packages.py b/recipes/e3sm-unified/build_packages.py index 368f8e48..2caf35f1 100755 --- a/recipes/e3sm-unified/build_packages.py +++ b/recipes/e3sm-unified/build_packages.py @@ -8,10 +8,10 @@ from shared import get_rc_dev_labels, get_version_from_meta -DEV_PYTHON_VERSIONS = ["3.10"] +DEV_PYTHON_VERSIONS = ["3.13"] DEV_MPI_VERSIONS = ["nompi", "hpc"] -RELEASE_PYTHON_VERSIONS = ["3.9", "3.10"] +RELEASE_PYTHON_VERSIONS = ["3.11", "3.12", "3.13"] RELEASE_MPI_VERSIONS = ["nompi", "mpich", "openmpi", "hpc"] @@ -31,8 +31,12 @@ def generate_matrix_files(dev, python_versions, mpi_versions): mpi_versions = RELEASE_MPI_VERSIONS matrix_files = [] for python in python_versions: + if float(python) >= 3.13: + python_build_str = f"{python}.* *_cp{''.join(python.split('.'))}" + else: + python_build_str = f"{python}.* *_cpython" for mpi in mpi_versions: - script = template.render(python=python, mpi=mpi) + script = template.render(python=python_build_str, mpi=mpi) filename = f"configs/mpi_{mpi}_python{python}.yaml" with open(filename, "w") as handle: handle.write(script) diff --git a/recipes/e3sm-unified/configs/template.yaml b/recipes/e3sm-unified/configs/template.yaml index cc3bd78a..38adbbe3 100644 --- a/recipes/e3sm-unified/configs/template.yaml +++ b/recipes/e3sm-unified/configs/template.yaml @@ -1,5 +1,5 @@ python: -- {{ python }}.* *_cpython +- {{ python }} mpi: - {{ mpi }} diff --git a/recipes/e3sm-unified/meta.yaml b/recipes/e3sm-unified/meta.yaml index 74b8c3d8..ea5c3824 100644 --- a/recipes/e3sm-unified/meta.yaml +++ b/recipes/e3sm-unified/meta.yaml @@ -1,5 +1,5 @@ {% set name = "E3SM-Unified" %} -{% set version = "1.11.1" %} +{% set version = "1.12.0rc4" %} {% set build = 0 %} package: @@ -32,10 +32,9 @@ requirements: - python - r - cdo - - cime_gen_domain 6.1.59 # [linux] - - chemdyg 1.1.0 - -
e3sm_diags 3.0.0 - - e3sm_to_cmip 1.11.3 + - chemdyg 1.1.1rc1 + - e3sm_diags 3.1.0rc2 + - e3sm_to_cmip 1.13.0rc3 - e3sm-tools 3.0.2 # [linux] - geometric_features 1.6.1 # fixes an issues with cryptography <37 constraint @@ -43,23 +42,22 @@ requirements: - ilamb 2.7.2 # [mpi != 'nompi' and mpi != 'hpc'] - ipython - jupyter - - livvkit 3.1.0 - - mache 1.28.0 - - moab 5.5.1 {{ mpi_prefix }}_tempest_* # [mpi != 'hpc'] - - mosaic 1.1.0 # [py>=310] - - mpas-analysis 1.13.0 - - mpas_tools 0.36.0 - - nco 5.3.2 # [mpi != 'hpc'] - - pcmdi_metrics 3.8.2 + - livvkit 3.2.0 + - mache 2.0.0rc4 + - moab 5.6.0rc2 {{ mpi_prefix }}_tempest_* # [mpi != 'hpc'] + - mosaic 1.2.0 + - mpas-analysis 1.14.0rc2 + - mpas_tools 1.4.0rc1 + - nco 5.3.6 # [mpi != 'hpc'] + - pcmdi_metrics 4.0.1 - squadgen 1.2.2 # [linux] - tempest-remap 2.2.0 # [mpi != 'hpc'] - - tempest-extremes 2.2.3 {{ mpi_prefix }}_* # [mpi != 'hpc'] - - uxarray 2024.11.1 # [py<310] - - uxarray >=2024.12.0 # [py>=310] - - xcdat 0.8.0 - - zppy 3.0.0 - - zppy-interfaces 0.1.2 - - zstash 1.4.4 # [linux] + - tempest-extremes 2.4 {{ mpi_prefix }}_* # [mpi != 'hpc'] + - uxarray >=2024.12.0 + - xcdat 0.10.1 + - zppy 3.1.0rc3 + - zppy-interfaces 0.2.0rc2 + - zstash 1.5.0rc4 # [linux] ### mkdocs - mkdocs-material @@ -75,48 +73,37 @@ requirements: - blas - bottleneck - cartopy >=0.17.0 - - cdat_info 8.2.1 - - cdms2 3.1.5 - - cdtime 3.1.4 - - cdutil 8.2.1 - cmocean - # the last version before python 3.9 support was dropped - - dask 2024.8.0 # [py<310] - - dask 2024.11.2 # [py>=310] + - dask 2025.9.1 - dogpile.cache - eofs - - esmf 8.8.0 {{ mpi_prefix }}_* - - esmpy 8.8.0 + - esmf 8.9.0 {{ mpi_prefix }}_* + - esmpy 8.9.0 - f90nml - ffmpeg - - genutil 8.2.1 - globus-sdk - gsw - - hdf5 1.14.3 {{ mpi_prefix }}_* + - hdf5 1.14.6 {{ mpi_prefix }}_* - ipygany - - libnetcdf 4.9.2 {{ mpi_prefix }}_* + - libnetcdf 4.9.3 {{ mpi_prefix }}_* - lxml - - matplotlib-base 3.9.4 + - matplotlib-base 3.10.6 - metpy - mpi4py # [mpi != 'nompi' and mpi != 'hpc'] - - nb_conda - - nb_conda_kernels - ncview 2.1.8 - ncvis-climate 2024.01.26 - nc-time-axis - netcdf4 1.7.2 nompi_* - - notebook <7.0.0 + - notebook - numpy >=2.0.0 - openssh # [mpi == 'openmpi'] - output_viewer 1.3.3 - pillow - plotly - progressbar2 - - proj 9.5.1 + - proj 9.6.2 - pyevtk - # the last version before python 3.9 support was dropped - - pyproj 3.6.1 # [py<310] - - pyproj 3.7.0 # [py>=310] + - pyproj 3.7.2 - pyremap - pytest - pywavelets @@ -128,10 +115,8 @@ requirements: - shapely - sympy >=0.7.6 - tabulate - - windspharm # [py<=310] - # the last version before python 3.9 support was dropped - - xarray 2024.7.0 # [py<310] - - xarray 2025.1.1 # [py>=310] + - windspharm + - xarray 2025.9.0 - xesmf 0.8.8 # addition ilamb 2.7 dependencies, for system MPI builds @@ -165,7 +150,6 @@ test: - ncks --help # [mpi != 'hpc'] - ncap2 --help # [mpi != 'hpc'] - jupyter --help - - jupyter nbextension list - ipython -h - ipython3 -h - globus --help
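Note: to make the conda-build matrix change above concrete, here is a small Python sketch of the pin string that `build_packages.py` now renders into each `configs/mpi_<mpi>_python<version>.yaml`. The mapping itself is copied from the diff; the rationale (that conda-forge's Python 3.13 builds no longer carry the generic `*_cpython` build string) is an assumption, not something stated in this PR.

```python
def python_build_str(python: str) -> str:
    # same mapping as generate_matrix_files() in build_packages.py:
    # 3.13 and newer get a version-specific build string (e.g. *_cp313),
    # older versions keep the generic *_cpython build string
    if float(python) >= 3.13:
        return f"{python}.* *_cp{''.join(python.split('.'))}"
    return f"{python}.* *_cpython"


for python in ["3.11", "3.12", "3.13"]:
    # prints "3.11.* *_cpython", "3.12.* *_cpython", "3.13.* *_cp313"
    print(f"python{python}: {python_build_str(python)}")
```

With `configs/template.yaml` now rendering `- {{ python }}` directly, the full pin (version plus build string) lives in `build_packages.py` instead of being split between the two files.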