Skip to content

Commit 7905042

Browse files
authored
Merge pull request #198 from bertinia/master
updates and fixes
2 parents 551c72a + 3d2bb15 commit 7905042

File tree

9 files changed

+65
-74
lines changed

9 files changed

+65
-74
lines changed

Machines/cheyenne_modules

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ module load python/2.7.14
66
module load intel/17.0.1
77
module load ncarenv
88
module load ncarcompilers
9-
module load mpt/2.15f
9+
module load mpt/2.19
1010
module load netcdf/4.6.1
1111
module load nco/4.7.4
1212
module load ncl/6.4.0

Machines/machine_postprocess.xml

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@
1818
<module>module purge</module>
1919
</reset_modules>
2020
<modules>
21-
<module>module load python/2.7.14</module>
2221
<module>module load intel/17.0.1</module>
2322
<module>module load ncarenv</module>
2423
<module>module load ncarcompilers</module>
@@ -140,11 +139,10 @@
140139
<module>module purge</module>
141140
</reset_modules>
142141
<modules>
143-
<module>module load python/2.7.14</module>
144142
<module>module load intel/17.0.1</module>
145143
<module>module load ncarenv</module>
146144
<module>module load ncarcompilers</module>
147-
<module>module load mpt/2.15f</module>
145+
<module>module load mpt/2.19</module>
148146
<module>module load netcdf/4.6.1</module>
149147
<module>module load nco/4.7.4</module>
150148
<module>module load ncl/6.4.0</module>

Tools/ration.log

Lines changed: 0 additions & 31 deletions
This file was deleted.

Tools/ration_script

Lines changed: 22 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,28 +1,35 @@
1-
#! /usr/bin/env bash
1+
#!/usr/bin/env bash
22
#
33
# template to activate the virtualenv, call post process program, deactivate virtualenv
44
#
55

6-
#BSUB -n 4
7-
#BSUB -R "span[ptile=2]"
8-
#BSUB -q geyser
9-
#BSUB -N
10-
#BSUB -a poe
11-
#BSUB -J ration_script
12-
#BSUB -W 00:02
13-
#BSUB -P P93300606
6+
#PBS -N ration
7+
#PBS -q regular
8+
#PBS -l select=1:ncpus=8:mpiprocs=8
9+
#PBS -l walltime=00:00:02
10+
#PBS -A P93300606
1411

15-
. /glade/apps/opt/lmod/lmod/init/bash
12+
source /etc/profile.d/modules.sh
1613

17-
export MP_LABELIO=yes
14+
export MPI_UNBUFFERED_STDIO=true
15+
export TMPDIR=$TMPDIR
1816

19-
module load python/2.7.7
17+
module purge
2018

21-
. /glade/p/work/aliceb/sandboxes/dev/postprocessing/cesm-env2/bin/activate
19+
## activate the virtualenv that contains all the non-bootstrapped dependencies
20+
cd /glade/work/aliceb/sandboxes/dev/postprocessing_new/cesm-env2/bin
21+
echo "Running from virtualenv directory:"
22+
pwd
23+
. activate
2224

23-
module load mpi4py/2.0.0
25+
## load the boot-strap modules
26+
##module load python/2.7.14
27+
module load intel/17.0.1
28+
module load ncarenv
29+
module load ncarcompilers
30+
module load mpt/2.19
2431

25-
mpirun.lsf ./ration_example.py >> ./ration.log
32+
mpiexec_mpt dplace -s 1 /glade/work/aliceb/sandboxes/dev/postprocessing_new/Tools/ration_test.py >> /glade/work/aliceb/sandboxes/dev/postprocessing_new/Tools/ration.log
2633

2734
status=$?
2835
echo $status

Tools/ration_script_geyser renamed to Tools/ration_script_dav

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,27 @@
11
#!/bin/bash -l
22

3-
## test the mpi4py and ASAPPyTools utility on geyser with ncar_pylib virtualenv
3+
## test the mpi4py and ASAPPyTools utility on DAV with ncar_pylib virtualenv
44

5-
#SBATCH -t 00:05:00
65
#SBATCH -n 4
76
#SBATCH -N 2
87
#SBATCH --ntasks-per-node=2
8+
#SBATCH -t 00:05:00
99
#SBATCH -p dav
1010
#SBATCH -J ration_test
1111
#SBATCH -A P93300606
12-
#SBATCH -C geyser
1312
#SBATCH --mem 1G
1413
#SBATCH -e ration_test.err.%J
1514
#SBATCH -o ration_test.out.%J
1615

17-
export MP_LABELIO=yes
18-
16+
module purge
1917
module load python/2.7.14
18+
module load intel/17.0.1
19+
module load ncarenv
20+
module load ncarcompilers
21+
module load impi
22+
2023

21-
. /glade2/work/aliceb/sandboxes/dev/postprocessing_geyser/cesm-env2/bin/activate
24+
. /gpfs/fs1/work/aliceb/sandboxes/dev/postprocessing_dav/cesm-env2/bin/activate
2225

2326
srun ./ration_test.py >> ./ration.log
2427

Tools/ration_test.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
#!/usr/bin/env python
1+
#!/usr/bin/env python2
22
import sys
33

44
try:

diagnostics/diagnostics/ocn/ocn_avg_generator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -358,7 +358,7 @@ def createClimFiles(start_year, stop_year, in_dir, htype, tavgdir, case, tseries
358358
if len(averageListMoc) > 0:
359359
# call the pyAverager with the inVarList
360360
if 'MOC' in inVarList:
361-
tmpInVarList = ['MOC']
361+
tmpInVarList = ['MOC','SALT', 'TEMP']
362362
else:
363363
tmpInVarList = ['SALT', 'TEMP']
364364
if main_comm.is_manager():

timeseries/timeseries/cesm_tseries_generator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -184,7 +184,7 @@ def readArchiveXML(caseroot, input_rootdir, output_rootdir, casename, standalone
184184
log[comp+stream] = {'slices':[],'index':0}
185185
ts_log_dates = log[comp+stream]['slices']
186186
index = log[comp+stream]['index']
187-
files,dates,index = chunking.get_chunks(tper, index, size_n, stream_dates, ts_log_dates, cal, units, completechunk)
187+
files,dates,index = chunking.get_chunks(tper, index, size_n, stream_dates, ts_log_dates, cal, units, completechunk, tseries_tper)
188188
for d in dates:
189189
log[comp+stream]['slices'].append(float(d))
190190
log[comp+stream]['index']=index

timeseries/timeseries/chunking.py

Lines changed: 29 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@ def num2date(time_value, unit, calendar):
1313
time_value = int(round(time_value))
1414
if ('common_year' in unit):
1515
my_unit = unit.replace('common_year', 'day')
16-
## my_time_value = time_value * 365
1716
my_time_value = int(round(time_value)) * 365
1817
else:
1918
my_unit = unit
@@ -130,7 +129,7 @@ def get_input_dates(glob_str, comm, rank, size):
130129
comm.sync()
131130
return g_stream_dates,g_file_slices,calendar.lower(),units,time_period_freq
132131

133-
def get_cesm_date(fn,t=None):
132+
def get_cesm_date(fn,tseries_tper,t=None):
134133

135134
'''
136135
Open a netcdf file and return its datestamp
@@ -156,28 +155,42 @@ def get_cesm_date(fn,t=None):
156155
# for the first lnd and rof file
157156
if ( -1.0 < d < 0.0):
158157
d = 0
159-
if t == 'bb':
158+
elif t == 'bb':
160159
d = f.variables[att['bounds']][0][0]
161160
# for the first lnd and rof file
162161
if ( -1.0 < d < 0.0):
163162
d = 0
164163
elif(d > 1):
165-
d = d = f.variables[att['bounds']][0][1]
164+
d = f.variables[att['bounds']][0][1]
166165
elif t == 'e':
167166
l = len(f.variables[att['bounds']])
168167
d = (f.variables[att['bounds']][l-1][1])-1
169168
elif t == 'ee':
170169
l = len(f.variables[att['bounds']])
171170
d = (f.variables[att['bounds']][l-1][1])
171+
172+
# problem if global attr time_period_freq does not exist in the nc file
173+
if 'time_period_freq' in f.ncattrs():
174+
if 'month' in f.time_period_freq:
175+
if t=='bb' or t=='b':
176+
d = (f.variables[att['bounds']][0][0] + f.variables[att['bounds']][0][1]) / 2
177+
if t=='ee' or t=='e':
178+
l = len(f.variables[att['bounds']])
179+
d = (f.variables[att['bounds']][l-1][0]+f.variables[att['bounds']][l-1][1])/2
180+
elif 'month' in tseries_tper:
181+
if t=='bb' or t=='b':
182+
d = (f.variables[att['bounds']][0][0] + f.variables[att['bounds']][0][1]) / 2
183+
if t=='ee' or t=='e':
184+
l = len(f.variables[att['bounds']])
185+
d = (f.variables[att['bounds']][l-1][0]+f.variables[att['bounds']][l-1][1])/2
186+
172187
else:
173188
# problem if time has only one value when units are common_year
174189
try:
175190
d = f.variables['time'][1]
176191
except:
177192
d = f.variables['time'][0]
178193

179-
180-
## d1 = cf_units.num2date(d,att['units'],att['calendar'].lower())
181194
d1 = num2date(d,att['units'],att['calendar'].lower())
182195
f.close()
183196

@@ -202,7 +215,6 @@ def get_chunk_range(tper, size, start, cal, units):
202215
'''
203216

204217
# Get the first date
205-
## d1 = cf_units.num2date(start, units, cal)
206218
d1 = num2date(start, units, cal)
207219

208220
# Figure out how many days each chunk should be
@@ -222,17 +234,15 @@ def get_chunk_range(tper, size, start, cal, units):
222234
y2 = y2 + 1
223235
m2 = m2 - 12
224236
d2 = datetime.datetime(y2, m2, d1.day, d1.hour, d1.minute)
225-
## end = cf_units.date2num(d2, units, cal)
226237
end = date2num(d2, units, cal)
227238

228239
elif 'year' in tper: #year
229240
d2 = datetime.datetime(int(size)+d1.year, d1.month, d1.day, d1.hour, d1.minute)
230-
## end = cf_units.date2num(d2, units, cal)
231241
end = date2num(d2, units, cal)
232242

233243
return start, end
234244

235-
def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s):
245+
def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s, tseries_tper):
236246

237247
'''
238248
Figure out what chunks there are to do for a particular CESM output stream
@@ -249,6 +259,7 @@ def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s):
249259
units(string) - the units to use to figure out chunk size
250260
s(string) - flag to determine if we need to wait until we have all data before we create a chunk or
251261
if it's okay to do an incomplete chunk
262+
tseries_tper - time_period_freq read from XML rather than nc file
252263
253264
Output:
254265
files(dictionary) - keys->chunk, values->a list of all files needed for this chunk and the start and end dates
@@ -307,10 +318,10 @@ def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s):
307318
files[chunk_n] = {}
308319
files[chunk_n]['fn'] = sorted(cfiles)
309320
if chunk_n > 0:
310-
files[chunk_n]['start'] = get_cesm_date(cfiles[0],t='bb')
321+
files[chunk_n]['start'] = get_cesm_date(cfiles[0],tseries_tper,t='bb')
311322
else:
312-
files[chunk_n]['start'] = get_cesm_date(cfiles[0],t='b')
313-
files[chunk_n]['end'] = get_cesm_date(cfiles[-1],t='e')
323+
files[chunk_n]['start'] = get_cesm_date(cfiles[0],tseries_tper,t='b')
324+
files[chunk_n]['end'] = get_cesm_date(cfiles[-1],tseries_tper,t='e')
314325
for cd in sorted(cdates):
315326
dates.append(cd)
316327
e = True
@@ -319,8 +330,11 @@ def get_chunks(tper, index, size, stream_dates, ts_log_dates, cal, units, s):
319330
files[chunk_n] = {}
320331
s_cdates = sorted(cdates)
321332
files[chunk_n]['fn'] = sorted(cfiles)
322-
files[chunk_n]['start'] = get_cesm_date(cfiles[0],t='bb')
323-
files[chunk_n]['end'] = get_cesm_date(cfiles[-1],t='ee')
333+
if chunk_n > 0:
334+
files[chunk_n]['start'] = get_cesm_date(cfiles[0],tseries_tper,t='bb')
335+
else:
336+
files[chunk_n]['start'] = get_cesm_date(cfiles[0],tseries_tper,t='b')
337+
files[chunk_n]['end'] = get_cesm_date(cfiles[-1],tseries_tper,t='ee')
324338
for cd in sorted(cdates):
325339
dates.append(cd)
326340
chunk_n = chunk_n+1

0 commit comments

Comments (0)