diff --git a/modulefiles/GDAS/hera.intel.lua b/modulefiles/GDAS/hera.intel.lua index add6e1d65..ece1126a6 100644 --- a/modulefiles/GDAS/hera.intel.lua +++ b/modulefiles/GDAS/hera.intel.lua @@ -79,9 +79,9 @@ local mpinproc = '-n' setenv('MPIEXEC_EXEC', mpiexec) setenv('MPIEXEC_NPROC', mpinproc) -setenv("CRTM_FIX","/scratch1/NCEPDEV/da/role.jedipara/GDASApp/fix/crtm/2.4.0") -setenv("GDASAPP_TESTDATA","/scratch1/NCEPDEV/da/role.jedipara/GDASApp/testdata") -setenv("GDASAPP_UNIT_TEST_DATA_PATH", "/scratch1/NCEPDEV/da/role.jedipara/GDASApp/unittestdata") +setenv("CRTM_FIX","/scratch3/NCEPDEV/da/role.jedipara/GDASApp/fix/crtm/2.4.0") +setenv("GDASAPP_TESTDATA","/scratch3/NCEPDEV/da/role.jedipara/GDASApp/testdata") +setenv("GDASAPP_UNIT_TEST_DATA_PATH", "/scratch3/NCEPDEV/da/role.jedipara/GDASApp/unittestdata") whatis("Name: ".. pkgName) whatis("Version: ".. pkgVersion) diff --git a/prototypes/configs/cp0.sh b/prototypes/configs/cp0.sh deleted file mode 100644 index 1f474d113..000000000 --- a/prototypes/configs/cp0.sh +++ /dev/null @@ -1,25 +0,0 @@ -# top level options -GWHASH=255d99b -GDASHASH=ab6f06d -BUILD="NO" -PSLOT=cp0 -PROTOROOT=/scratch1/NCEPDEV/da/Cory.R.Martin/GDAS_prototypes -GWDIR=$PROTOROOT/$PSLOT -DUMPDIR=/scratch1/NCEPDEV/da/role.jedipara/dump/ -ICSDIR=/scratch1/NCEPDEV/stmp4/Cory.R.Martin/tmp_ics -expdir=$PROTOROOT/expdirs -comrot=$PROTOROOT/rotdirs -idate=2021073118 -edate=2021080118 -app="ATM" -starttype="cold" -gfscyc=0 -resdetatmos=96 -resensatmos=96 -nens=0 -# config.* options -DO_JEDIATMVAR="YES" -DO_JEDIATMENS="NO" -DO_JEDIOCNVAR="NO" -DO_JEDISNOWDA="NO" -DO_MERGENSST="NO" diff --git a/prototypes/gen_prototype.sh b/prototypes/gen_prototype.sh deleted file mode 100755 index 439585625..000000000 --- a/prototypes/gen_prototype.sh +++ /dev/null @@ -1,109 +0,0 @@ -#!/bin/bash -set -e - -# ============================================================================== -usage() { - set +x - echo - echo "Usage: $0 -c -t -h" - echo - echo " -c Configuration 
for prototype defined in shell script " - echo " -t Supported platform script is running on " - echo " -h display this message and quit" - echo - exit 1 -} - -# ============================================================================== -while getopts "c:t:h" opt; do - case $opt in - c) - config=$OPTARG - ;; - t) - MACHINE=${MACHINE:-$OPTARG} - ;; - h|\?|:) - usage - ;; - esac -done - -dir_root="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/../ >/dev/null 2>&1 && pwd )" - -# source the input configuration -source $config - -# create directories -mkdir -p $PROTOROOT/$PSLOT - -# clone/build/link workflow and GDASApp -if [[ $BUILD == 'YES' ]]; then - cd $GWDIR - git clone https://github.com/NOAA-EMC/global-workflow.git - cd global-workflow/sorc - git checkout $GWHASH - ./checkout.sh -ug - cd gdas.cd - git checkout $GDASHASH - cd ../ - ./build_all.sh gfs gsi gdas - ./link_workflow.sh -fi - -# load modules to then generate experiment directory and rocoto XML -module use $dir_root/modulefiles -module load GDAS/$MACHINE - -# move expdir if it exists, delete backup if it exists -if [[ -d $expdir/$PSLOT ]]; then - [[ -d $expdir/${PSLOT}.bak ]] && rm -rf $expdir/${PSLOT}.bak - mv $expdir/$PSLOT $expdir/${PSLOT}.bak -fi - -# move rotdir if it exists, delete backup if it exists -if [[ -d $comrot/$PSLOT ]]; then - [[ -d $comrot/${PSLOT}.bak ]] && rm -rf $comrot/${PSLOT}.bak - mv $comrot/$PSLOT $comrot/${PSLOT}.bak -fi - -# create YAML to override workflow config defaults -mkdir -p $expdir -cat > $expdir/config_${PSLOT}.yaml << EOF -base: - ACCOUNT: "da-cpu" - HPSS_PROJECT: "emc-da" - HOMEDIR: "/scratch1/NCEPDEV/da/${USER}" - DMPDIR: "${DUMPDIR}" - DO_JEDIATMVAR: "${DO_JEDIATMVAR}" - DO_JEDIATMENS: "${DO_JEDIATMENS}" - DO_JEDIOCNVAR: "${DO_JEDIOCNVAR}" - DO_JEDISNOWDA: "${DO_JEDISNOWDA}" - DO_MERGENSST: "${DO_MERGENSST}" -EOF - -# setup experiment -cd $GWDIR/global-workflow/workflow -./setup_expt.py gfs cycled --idate $idate \ - --edate $edate \ - --app $app \ - --start $starttype 
\ - --gfs_cyc $gfscyc \ - --resdetatmos $resdetatmos \ - --resensatmos $resensatmos \ - --nens $nens \ - --pslot $PSLOT \ - --configdir $GWDIR/global-workflow/dev/parm/config/gfs \ - --comrot $comrot \ - --expdir $expdir \ - --icsdir $ICSDIR \ - --yaml $expdir/config_${PSLOT}.yaml - -# setup XML for workflow -./setup_xml.py $expdir/$PSLOT - -# run rocotorun one time -rocotorun -w $expdir/$PSLOT/${PSLOT}.xml -d $expdir/$PSLOT/${PSLOT}.db - -# run rocotostat on the first cycle to see if things were submitted -rocotostat -w $expdir/$PSLOT/${PSLOT}.xml -d $expdir/$PSLOT/${PSLOT}.db -c ${idate}00 diff --git a/test/aero/global-workflow/jjob_var_final.sh b/test/aero/global-workflow/jjob_var_final.sh index ad205b028..3684d3aad 100755 --- a/test/aero/global-workflow/jjob_var_final.sh +++ b/test/aero/global-workflow/jjob_var_final.sh @@ -5,7 +5,8 @@ bindir=$1 srcdir=$2 # Set g-w HOMEgfs -export HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config +topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." 
&& pwd -P) +export HOMEgfs=$topdir # Set variables for ctest export PSLOT=gdas_test @@ -20,33 +21,51 @@ export pid=${pid:-$$} export jobid=$pid export COMROOT=$DATAROOT export NMEM_ENS=0 -export ACCOUNT=da-cpu + +# Detect machine +source "${HOMEgfs}/ush/detect_machine.sh" + +# Set up the PYTHONPATH to include wxflow from HOMEgfs +if [[ -d "${HOMEgfs}/sorc/wxflow/src" ]]; then + PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src" +fi # Set python path for workflow utilities and tasks wxflowPATH="${HOMEgfs}/ush/python" PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH -# Detemine machine from config.base -machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"') - -# Set NETCDF and UTILROOT variables (used in config.base) -if [ $machine = 'HERA' ]; then - NETCDF=$( which ncdump ) - export NETCDF - export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2" -elif [ $machine = 'ORION' ]; then - ncdump=$( which ncdump ) - NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) - export NETCDF - export UTILROOT=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2 +# Export library path +export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${HOMEgfs}/lib" + +# Create yaml with job configuration +memory="8Gb" +if [[ ${MACHINE_ID} == "gaeac6" ]]; then + memory=0 fi +config_yaml="./config_${type}.yaml" +cat << EOF > ${config_yaml} +machine: ${MACHINE_ID} +homegfs: ${HOMEgfs} +job_name: ${type} +walltime: "00:30:00" +nodes: 1 +ntasks_per_node: 1 +threads_per_task: 1 +memory: ${memory} +command: ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE +filename: submit_${type}.sh +EOF + +# Create script to execute j-job +$HOMEgfs/sorc/gdas.cd/test/workflow/generate_job_script.py ${config_yaml} +SCHEDULER=$(echo `grep SCHEDULER ${HOMEgfs}/sorc/gdas.cd/test/workflow/hosts/${MACHINE_ID}.yaml | cut -d":" -f2` | tr -d ' ') -# Execute j-job -if [ $machine = 'HERA' ]; then - sbatch --ntasks=1
--account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE -elif [ $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE +# Submit script to execute j-job +if [[ $SCHEDULER = 'slurm' ]]; then + sbatch --export=ALL --wait submit_${type}.sh +elif [[ $SCHEDULER = 'pbspro' ]]; then + qsub -V -W block=true submit_${type}.sh else ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE fi diff --git a/test/aero/global-workflow/jjob_var_init.sh b/test/aero/global-workflow/jjob_var_init.sh index 329d3d410..6220f1c8e 100755 --- a/test/aero/global-workflow/jjob_var_init.sh +++ b/test/aero/global-workflow/jjob_var_init.sh @@ -5,7 +5,8 @@ bindir=$1 srcdir=$2 # Set g-w HOMEgfs -export HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config +topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." && pwd -P) +export HOMEgfs=$topdir # Set variables for ctest export PSLOT=gdas_test @@ -21,32 +22,27 @@ export pid=${pid:-$$} export jobid=$pid export COMROOT=$DATAROOT export NMEM_ENS=0 -export ACCOUNT=da-cpu export COM_TOP=$ROTDIR # Set GFS COM paths source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/dev/parm/config/gfs/config.com" +# Detect machine +source "${HOMEgfs}/ush/detect_machine.sh" + +# Set up the PYTHONPATH to include wxflow from HOMEgfs +if [[ -d "${HOMEgfs}/sorc/wxflow/src" ]]; then + PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src" +fi + # Set python path for workflow utilities and tasks wxflowPATH="${HOMEgfs}/ush/python" PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH -# Detemine machine from config.base -machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"') - -# Set NETCDF and UTILROOT variables (used in config.base) -if [ $machine = 'HERA' ]; then - NETCDF=$( which ncdump ) - export NETCDF - export 
UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2" -elif [ $machine = 'ORION' ]; then - ncdump=$( which ncdump ) - NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) - export NETCDF - export UTILROOT=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2 -fi +# Export library path +export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${HOMEgfs}/lib" # Set date variables for previous cycle gPDY=$(date +%Y%m%d -d "${PDY} ${cyc} - 6 hours") @@ -85,12 +81,34 @@ for file in $flist; do cp $GDASAPP_TESTDATA/lowres/$dpath/$file $COMIN_ATMOS_RESTART_PREV_DIRNAME/restart/ done - -# Execute j-job -if [ $machine = 'HERA' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE -elif [ $machine = 'ORION' ]; then - sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE +# Create yaml with job configuration +memory="8Gb" +if [[ ${MACHINE_ID} == "gaeac6" ]]; then + memory=0 +fi +config_yaml="./config_${type}.yaml" +cat << EOF > ${config_yaml} +machine: ${MACHINE_ID} +homegfs: ${HOMEgfs} +job_name: ${type} +walltime: "00:30:00" +nodes: 1 +ntasks_per_node: 1 +threads_per_task: 1 +memory: ${memory} +command: ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE +filename: submit_${type}.sh +EOF + +# Create script to execute j-job +$HOMEgfs/sorc/gdas.cd/test/workflow/generate_job_script.py ${config_yaml} +SCHEDULER=$(echo `grep SCHEDULER ${HOMEgfs}/sorc/gdas.cd/test/workflow/hosts/${MACHINE_ID}.yaml | cut -d":" -f2` | tr -d ' ') + +# Submit script to execute j-job +if [[ $SCHEDULER = 'slurm' ]]; then + sbatch --export=ALL --wait submit_${type}.sh +elif [[ $SCHEDULER = 'pbspro' ]]; then + qsub -V -W block=true submit_${type}.sh else ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE fi diff --git a/test/aero/global-workflow/jjob_var_run.sh
b/test/aero/global-workflow/jjob_var_run.sh index 4c04d0944..ff55dab3e 100755 --- a/test/aero/global-workflow/jjob_var_run.sh +++ b/test/aero/global-workflow/jjob_var_run.sh @@ -23,33 +23,52 @@ export pid=${pid:-$$} export jobid=$pid export COMROOT=$DATAROOT export NMEM_ENS=0 -export ACCOUNT=da-cpu + +# Detect machine +source "${HOMEgfs}/ush/detect_machine.sh" + +# Set up the PYTHONPATH to include wxflow from HOMEgfs +if [[ -d "${HOMEgfs}/sorc/wxflow/src" ]]; then + PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src" +fi # Set python path for workflow utilities and tasks wxflowPATH="${HOMEgfs}/ush/python" PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH -# Detemine machine from config.base -machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"') - -# Set NETCDF and UTILROOT variables (used in config.base) -if [ $machine = 'HERA' ]; then - NETCDF=$( which ncdump ) - export NETCDF - export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2" -elif [ $machine = 'ORION' ]; then - ncdump=$( which ncdump ) - NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) - export NETCDF - export UTILROOT=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2 +# Export library path +export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${HOMEgfs}/lib" + +# Create yaml with job configuration +memory="96Gb" +if [[ ${MACHINE_ID} == "gaeac6" ]]; then + memory=0 fi +config_yaml="./config_${type}.yaml" +cat << EOF > ${config_yaml} +machine: ${MACHINE_ID} +homegfs: ${HOMEgfs} +job_name: ${type} +walltime: "00:30:00" +nodes: 1 +ntasks_per_node: 6 +threads_per_task: 1 +memory: ${memory} +command: ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN +filename: submit_${type}.sh +EOF + +# Create script to execute j-job.
Set job scheduler +${HOMEgfs}/sorc/gdas.cd/test/workflow/generate_job_script.py ${config_yaml} +SCHEDULER=$(echo `grep SCHEDULER ${HOMEgfs}/sorc/gdas.cd/test/workflow/hosts/${MACHINE_ID}.yaml | cut -d":" -f2` | tr -d ' ') -# Execute j-job -if [ $machine = 'HERA' ]; then - sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN -elif [ $machine = 'ORION' ]; then - sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN +# Submit script to execute j-job +if [[ $SCHEDULER = 'slurm' ]]; then + sbatch --export=ALL --wait submit_${type}.sh +elif [[ $SCHEDULER = 'pbspro' ]]; then + qsub -V -W block=true submit_${type}.sh else ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN fi + diff --git a/test/marine/CMakeLists.txt b/test/marine/CMakeLists.txt index 8ba3d088b..861f46d2c 100644 --- a/test/marine/CMakeLists.txt +++ b/test/marine/CMakeLists.txt @@ -84,7 +84,7 @@ set(BUFR_TEST_DIR_ORION "/work/noaa/da/marineda/gfs-marine/data/obs/ci/bufr" ) set(BUFR_TEST_DIR_HERA - "/scratch1/NCEPDEV/da/common/ci/bufr" + "/scratch3/NCEPDEV/da/common/ci/bufr" ) CHECK_AND_SET_PATH( ${BUFR_TEST_DIR_ORION} @@ -98,7 +98,7 @@ else() # message(STATUS "Found bufr test directory: ${BUFR_TEST_DIR}") set(OCEAN_BASIN_FILE_HERA - "/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/common/RECCAP2_region_masks_all_v20221025.nc" + "/scratch3/NCEPDEV/da/common/validation/RECCAP2_region_masks_all_v20221025.nc" ) set(OCEAN_BASIN_FILE_ORION "/work2/noaa/global/role-global/fix/gdas/soca/20240802/common/RECCAP2_region_masks_all_v20221025.nc" diff --git a/ush/ufoeval/README_phase3 b/ush/ufoeval/README_phase3 index d7d4c1410..01a34ff0b 100644 --- a/ush/ufoeval/README_phase3 +++ b/ush/ufoeval/README_phase3 @@ -17,7 +17,7 @@ config_gsi.yaml and config_jedi.yaml contain the following user settings HOMEgfs: global-workflow installation STAGEDIR: machine specific 
path containing job input, set to the machine appropriate path below - Hera: /scratch1/NCEPDEV/da/Russ.Treadon/STAGEDIR + Hera: /scratch3/NCEPDEV/da/Russ.Treadon/STAGEDIR Hercules, Orion: /work2/noaa/da/rtreadon/STAGEDIR RUNDIR: path to the directory in which job will run. Full path is RUNDIR + DA_CORE + DA_TYPE - app files diff --git a/ush/ufoeval/config_gsi.yaml b/ush/ufoeval/config_gsi.yaml index 28b0e9adf..4cbd02434 100644 --- a/ush/ufoeval/config_gsi.yaml +++ b/ush/ufoeval/config_gsi.yaml @@ -11,11 +11,11 @@ job options: mem: 96Gb directories: - HOMEgfs: /scratch1/NCEPDEV/da/Russ.Treadon/git/global-workflow/test - STAGEDIR: /scratch1/NCEPDEV/da/Russ.Treadon/STAGEDIR - RUNDIR: /scratch1/NCEPDEV/stmp2/Russ.Treadon/RUNDIRS + HOMEgfs: /scratch3/NCEPDEV/da/Russ.Treadon/git/global-workflow/test + STAGEDIR: /scratch3/NCEPDEV/da/Russ.Treadon/STAGEDIR + RUNDIR: /scratch3/NCEPDEV/stmp2/Russ.Treadon/RUNDIRS app files: DA_CORE: gsi DA_TYPE: 3dv - APPEXE: /scratch1/NCEPDEV/da/Russ.Treadon/git/global-workflow/test/exec/gsi.x + APPEXE: /scratch3/NCEPDEV/da/Russ.Treadon/git/global-workflow/test/exec/gsi.x diff --git a/ush/ufoeval/config_jedi.yaml b/ush/ufoeval/config_jedi.yaml index f65569c61..7339b52c0 100644 --- a/ush/ufoeval/config_jedi.yaml +++ b/ush/ufoeval/config_jedi.yaml @@ -11,12 +11,12 @@ job options: mem: 96Gb directories: - HOMEgfs: /scratch1/NCEPDEV/da/Russ.Treadon/git/global-workflow/test - STAGEDIR: /scratch1/NCEPDEV/da/Russ.Treadon/STAGEDIR - RUNDIR: /scratch1/NCEPDEV/stmp2/Russ.Treadon/RUNDIRS + HOMEgfs: /scratch3/NCEPDEV/da/Russ.Treadon/git/global-workflow/test + STAGEDIR: /scratch3/NCEPDEV/da/Russ.Treadon/STAGEDIR + RUNDIR: /scratch3/NCEPDEV/stmp2/Russ.Treadon/RUNDIRS app files: DA_CORE: jedi DA_TYPE: 3dv - APPEXE: /scratch1/NCEPDEV/da/Russ.Treadon/git/global-workflow/test/exec/gdas.x - INCEXE: /scratch1/NCEPDEV/da/Russ.Treadon/git/global-workflow/test/exec/fv3jedi_fv3inc.x + APPEXE: /scratch3/NCEPDEV/da/Russ.Treadon/git/global-workflow/test/exec/gdas.x + 
INCEXE: /scratch3/NCEPDEV/da/Russ.Treadon/git/global-workflow/test/exec/fv3jedi_fv3inc.x diff --git a/utils/test/CMakeLists.txt b/utils/test/CMakeLists.txt index a74bea39c..d77d51ad8 100644 --- a/utils/test/CMakeLists.txt +++ b/utils/test/CMakeLists.txt @@ -94,7 +94,7 @@ set(RTOFS_FILES_PATH if (NOT EXISTS ${RTOFS_FILES_PATH}) # rtofs directory on hera: set(RTOFS_FILES_PATH - "/scratch1/NCEPDEV/da/common/ci/obs" + "/scratch3/NCEPDEV/da/common/ci/obs" ) endif() if (NOT EXISTS ${RTOFS_FILES_PATH})