Skip to content

Commit 6b68078

Browse files
Update Hera /scratch1 and /scratch2 paths (#1835)
1 parent a25ab26 commit 6b68078

File tree

11 files changed: +133 −211 lines changed

parm/jcb-gdas/modulefiles/GDAS/hera.intel.lua

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -79,9 +79,9 @@ local mpinproc = '-n'
7979
setenv('MPIEXEC_EXEC', mpiexec)
8080
setenv('MPIEXEC_NPROC', mpinproc)
8181

82-
setenv("CRTM_FIX","/scratch1/NCEPDEV/da/role.jedipara/GDASApp/fix/crtm/2.4.0")
83-
setenv("GDASAPP_TESTDATA","/scratch1/NCEPDEV/da/role.jedipara/GDASApp/testdata")
84-
setenv("GDASAPP_UNIT_TEST_DATA_PATH", "/scratch1/NCEPDEV/da/role.jedipara/GDASApp/unittestdata")
82+
setenv("CRTM_FIX","/scratch3/NCEPDEV/da/role.jedipara/GDASApp/fix/crtm/2.4.0")
83+
setenv("GDASAPP_TESTDATA","/scratch3/NCEPDEV/da/role.jedipara/GDASApp/testdata")
84+
setenv("GDASAPP_UNIT_TEST_DATA_PATH", "/scratch3/NCEPDEV/da/role.jedipara/GDASApp/unittestdata")
8585

8686
whatis("Name: ".. pkgName)
8787
whatis("Version: ".. pkgVersion)

parm/jcb-gdas/prototypes/configs/cp0.sh

Lines changed: 0 additions & 25 deletions
This file was deleted.

parm/jcb-gdas/prototypes/gen_prototype.sh

Lines changed: 0 additions & 109 deletions
This file was deleted.

parm/jcb-gdas/test/aero/global-workflow/jjob_var_final.sh

Lines changed: 39 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,8 @@ bindir=$1
55
srcdir=$2
66

77
# Set g-w HOMEgfs
8-
export HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config
8+
topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." && pwd -P)
9+
export HOMEgfs=$topdir
910

1011
# Set variables for ctest
1112
export PSLOT=gdas_test
@@ -20,33 +21,51 @@ export pid=${pid:-$$}
2021
export jobid=$pid
2122
export COMROOT=$DATAROOT
2223
export NMEM_ENS=0
23-
export ACCOUNT=da-cpu
24+
25+
# Detect machine
26+
source "${HOMEgfs}/ush/detect_machine.sh"
27+
28+
# Set up the PYTHONPATH to include wxflow from HOMEgfs
29+
if [[ -d "${HOMEgfs}/sorc/wxflow/src" ]]; then
30+
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src"
31+
fi
2432

2533
# Set python path for workflow utilities and tasks
2634
wxflowPATH="${HOMEgfs}/ush/python"
2735
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
2836
export PYTHONPATH
2937

30-
# Detemine machine from config.base
31-
machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"')
32-
33-
# Set NETCDF and UTILROOT variables (used in config.base)
34-
if [ $machine = 'HERA' ]; then
35-
NETCDF=$( which ncdump )
36-
export NETCDF
37-
export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2"
38-
elif [ $machine = 'ORION' ]; then
39-
ncdump=$( which ncdump )
40-
NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 )
41-
export NETCDF
42-
export UTILROOT=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2
38+
# Export library path
39+
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${HOMEgfs}/lib"
40+
41+
# Create yaml with job configuration
42+
memory="8Gb"
43+
if [[ ${MACHINE_ID} == "gaeac6" ]]; then
44+
memory=0
4345
fi
46+
config_yaml="./config_${type}.yaml"
47+
cat <<EOF > ${config_yaml}
48+
machine: ${MACHINE_ID}
49+
homegfs: ${HOMEgfs}
50+
job_name: ${type}
51+
walltime: "00:30:00"
52+
nodes: 1
53+
ntasks_per_node: 1
54+
threads_per_task: 1
55+
memory: ${memory}
56+
command: ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE
57+
filename: submit_${type}.sh
58+
EOF
59+
60+
# Create script to execute j-job
61+
$HOMEgfs/sorc/gdas.cd/test/workflow/generate_job_script.py ${config_yaml}
62+
SCHEDULER=$(echo `grep SCHEDULER ${HOMEgfs}/sorc/gdas.cd/test/workflow/hosts/${MACHINE_ID}.yaml | cut -d":" -f2` | tr -d ' ')
4463

45-
# Execute j-job
46-
if [ $machine = 'HERA' ]; then
47-
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE
48-
elif [ $machine = 'ORION' ]; then
49-
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE
64+
# Submit script to execute j-job
65+
if [[ $SCHEDULER = 'slurm' ]]; then
66+
sbatch --export=ALL --wait submit_${type}.sh
67+
elif [[ $SCHEDULER = 'pbspro' ]]; then
68+
qsub -V -W block=true submit_${type}.sh
5069
else
5170
${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE
5271
fi

parm/jcb-gdas/test/aero/global-workflow/jjob_var_init.sh

Lines changed: 40 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,8 @@ bindir=$1
55
srcdir=$2
66

77
# Set g-w HOMEgfs
8-
export HOMEgfs=$srcdir/../../ # TODO: HOMEgfs had to be hard-coded in config
8+
topdir=$(cd "$(dirname "$(readlink -f -n "${bindir}" )" )/../../.." && pwd -P)
9+
export HOMEgfs=$topdir
910

1011
# Set variables for ctest
1112
export PSLOT=gdas_test
@@ -21,32 +22,27 @@ export pid=${pid:-$$}
2122
export jobid=$pid
2223
export COMROOT=$DATAROOT
2324
export NMEM_ENS=0
24-
export ACCOUNT=da-cpu
2525
export COM_TOP=$ROTDIR
2626

2727
# Set GFS COM paths
2828
source "${HOMEgfs}/ush/preamble.sh"
2929
source "${HOMEgfs}/dev/parm/config/gfs/config.com"
3030

31+
# Detect machine
32+
source "${HOMEgfs}/ush/detect_machine.sh"
33+
34+
# Set up the PYTHONPATH to include wxflow from HOMEgfs
35+
if [[ -d "${HOMEgfs}/sorc/wxflow/src" ]]; then
36+
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src"
37+
fi
38+
3139
# Set python path for workflow utilities and tasks
3240
wxflowPATH="${HOMEgfs}/ush/python"
3341
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
3442
export PYTHONPATH
3543

36-
# Detemine machine from config.base
37-
machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"')
38-
39-
# Set NETCDF and UTILROOT variables (used in config.base)
40-
if [ $machine = 'HERA' ]; then
41-
NETCDF=$( which ncdump )
42-
export NETCDF
43-
export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2"
44-
elif [ $machine = 'ORION' ]; then
45-
ncdump=$( which ncdump )
46-
NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 )
47-
export NETCDF
48-
export UTILROOT=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2
49-
fi
44+
# Export library path
45+
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${HOMEgfs}/lib"
5046

5147
# Set date variables for previous cycle
5248
gPDY=$(date +%Y%m%d -d "${PDY} ${cyc} - 6 hours")
@@ -85,12 +81,34 @@ for file in $flist; do
8581
cp $GDASAPP_TESTDATA/lowres/$dpath/$file $COMIN_ATMOS_RESTART_PREV_DIRNAME/restart/
8682
done
8783

88-
89-
# Execute j-job
90-
if [ $machine = 'HERA' ]; then
91-
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE
92-
elif [ $machine = 'ORION' ]; then
93-
sbatch --ntasks=1 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE
84+
# Create yaml with job configuration
85+
memory="8Gb"
86+
if [[ ${MACHINE_ID} == "gaeac6" ]]; then
87+
memory=0
88+
fi
89+
config_yaml="./config_${type}.yaml"
90+
cat <<EOF > ${config_yaml}
91+
machine: ${MACHINE_ID}
92+
homegfs: ${HOMEgfs}
93+
job_name: ${type}
94+
walltime: "00:30:00"
95+
nodes: 1
96+
ntasks_per_node: 1
97+
threads_per_task: 1
98+
memory: ${memory}
99+
command: ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE
100+
filename: submit_${type}.sh
101+
EOF
102+
103+
# Create script to execute j-job
104+
$HOMEgfs/sorc/gdas.cd/test/workflow/generate_job_script.py ${config_yaml}
105+
SCHEDULER=$(echo `grep SCHEDULER ${HOMEgfs}/sorc/gdas.cd/test/workflow/hosts/${MACHINE_ID}.yaml | cut -d":" -f2` | tr -d ' ')
106+
107+
# Submit script to execute j-job
108+
if [[ $SCHEDULER = 'slurm' ]]; then
109+
sbatch --export=ALL --wait submit_${type}.sh
110+
elif [[ $SCHEDULER = 'pbspro' ]]; then
111+
qsub -V -W block=true submit_${type}.sh
94112
else
95113
${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE
96114
fi

parm/jcb-gdas/test/aero/global-workflow/jjob_var_run.sh

Lines changed: 38 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -23,33 +23,52 @@ export pid=${pid:-$$}
2323
export jobid=$pid
2424
export COMROOT=$DATAROOT
2525
export NMEM_ENS=0
26-
export ACCOUNT=da-cpu
26+
27+
# Detect machine
28+
source "${HOMEgfs}/ush/detect_machine.sh"
29+
30+
# Set up the PYTHONPATH to include wxflow from HOMEgfs
31+
if [[ -d "${HOMEgfs}/sorc/wxflow/src" ]]; then
32+
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src"
33+
fi
2734

2835
# Set python path for workflow utilities and tasks
2936
wxflowPATH="${HOMEgfs}/ush/python"
3037
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
3138
export PYTHONPATH
3239

33-
# Detemine machine from config.base
34-
machine=$(echo `grep 'machine=' $EXPDIR/config.base | cut -d"=" -f2` | tr -d '"')
35-
36-
# Set NETCDF and UTILROOT variables (used in config.base)
37-
if [ $machine = 'HERA' ]; then
38-
NETCDF=$( which ncdump )
39-
export NETCDF
40-
export UTILROOT="/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/intel-18.0.5.274/prod_util/1.2.2"
41-
elif [ $machine = 'ORION' ]; then
42-
ncdump=$( which ncdump )
43-
NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 )
44-
export NETCDF
45-
export UTILROOT=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2
40+
# Export library path
41+
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:${HOMEgfs}/lib"
42+
43+
# Create yaml with job configuration
44+
memory="96Gb"
45+
if [[ ${MACHINE_ID} == "gaeac6" ]]; then
46+
memory=0
4647
fi
48+
config_yaml="./config_${type}.yaml"
49+
cat <<EOF > ${config_yaml}
50+
machine: ${MACHINE_ID}
51+
homegfs: ${HOMEgfs}
52+
job_name: ${type}
53+
walltime: "00:30:00"
54+
nodes: 1
55+
ntasks_per_node: 6
56+
threads_per_task: 1
57+
memory: ${memory}
58+
command: ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN
59+
filename: submit_${type}.sh
60+
EOF
61+
62+
# Create script to execute j-job. Set job scheduler
63+
${HOMEgfs}/sorc/gdas.cd/test/workflow/generate_job_script.py ${config_yaml}
64+
SCHEDULER=$(echo `grep SCHEDULER ${HOMEgfs}/sorc/gdas.cd/test/workflow/hosts/${MACHINE_ID}.yaml | cut -d":" -f2` | tr -d ' ')
4765

48-
# Execute j-job
49-
if [ $machine = 'HERA' ]; then
50-
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN
51-
elif [ $machine = 'ORION' ]; then
52-
sbatch --ntasks=6 --account=$ACCOUNT --qos=batch --partition=orion --time=00:10:00 --export=ALL --wait ${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN
66+
# Submit script to execute j-job
67+
if [[ $SCHEDULER = 'slurm' ]]; then
68+
sbatch --export=ALL --wait submit_${type}.sh
69+
elif [[ $SCHEDULER = 'pbspro' ]]; then
70+
qsub -V -W block=true submit_${type}.sh
5371
else
5472
${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN
5573
fi
74+

0 commit comments

Comments (0)