self.update(b*bsize-self.n) # will also set self.n = b * bsize
-def drmaa_nativespec(specs: str, session) -> str:
-    """
-    Converts (CLI default) native Torque walltime and memory specifications to the DRMAA implementation (currently only Slurm is supported)
-
-    :param specs:   Native Torque walltime and memory specifications, e.g. '-l walltime=00:10:00,mem=2gb'
-    :param session: The DRMAA session
-    :return:        The converted native specifications
-    """
-
-    jobmanager: str = session.drmaaImplementation
-
-    if '-l ' in specs and 'pbs' not in jobmanager.lower():
-
-        if 'slurm' in jobmanager.lower():
-            specs = (specs.replace('-l ', '')
-                          .replace(',', ' ')
-                          .replace('walltime', '--time')
-                          .replace('mem', '--mem')
-                          .replace('gb', '000'))
-        else:
-            LOGGER.warning(f"Default `--cluster` native specifications are not (yet) provided for {jobmanager}. Please add them to your command if you get DRMAA errors")
+class CustomLogger(logging.Logger):
+    """Extend the Logger class to add custom methods for the new levels"""
LOGGER.error(f"{len(failedjobs)} HPC jobs failed to run:\n{failedjobs}\nThis may well be due to an underspecified `--cluster` input option (e.g. not enough memory)")
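For reference, the replace chain in the function above maps the CLI default Torque specification onto Slurm options. The following is a minimal usage sketch, not part of the commit, assuming it runs where drmaa_nativespec() is defined or imported; DummySession is a hypothetical stand-in for a real drmaa.Session (only its drmaaImplementation attribute is consulted):

# Minimal usage sketch (assumption: drmaa_nativespec is defined/imported as above)
class DummySession:
    drmaaImplementation = 'slurm'                       # hypothetical value; real sessions report the scheduler name

specs = drmaa_nativespec('-l walltime=00:10:00,mem=2gb', DummySession())
print(specs)                                            # -> '--time=00:10:00 --mem=2000'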
# Add the console streamhandler and bring some color to those boring logs! :-)
-coloredlogs.install(level='BCDEBUG' if DEBUG else 'VERBOSE' if not logfile.name else 'INFO', fmt=cfmt, datefmt=datefmt)    # NB: Using tqdm sets the streamhandler level to 0, see: https://github.com/tqdm/tqdm/pull/1235
+coloredlogs.install(level=BCDEBUG if DEBUG else VERBOSE if not logfile.name else 'INFO', fmt=cfmt, datefmt=datefmt)        # NB: Using tqdm sets the streamhandler level to 0, see: https://github.com/tqdm/tqdm/pull/1235
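Passing BCDEBUG and VERBOSE as bare constants only works if those custom levels are registered with the logging module and given numeric values. Below is a hedged sketch of how that wiring could look together with the new CustomLogger class; the numeric values and method names are assumptions for illustration, not taken from this commit:

import logging

VERBOSE = 15                                    # assumed value, between DEBUG (10) and INFO (20)
BCDEBUG = 11                                    # assumed value, just above DEBUG (10)
logging.addLevelName(VERBOSE, 'VERBOSE')
logging.addLevelName(BCDEBUG, 'BCDEBUG')

class CustomLogger(logging.Logger):
    """Extend the Logger class to add custom methods for the new levels"""

    def verbose(self, msg, *args, **kwargs):
        if self.isEnabledFor(VERBOSE):
            self._log(VERBOSE, msg, args, **kwargs)

    def bcdebug(self, msg, *args, **kwargs):
        if self.isEnabledFor(BCDEBUG):
            self._log(BCDEBUG, msg, args, **kwargs)

logging.setLoggerClass(CustomLogger)            # loggers created afterwards get the custom methods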
+def drmaa_nativespec(specs: str, session) -> str:
+    """
+    Converts (CLI default) native Torque walltime and memory specifications to the DRMAA implementation (currently only Slurm is supported)
+
+    :param specs:   Native Torque walltime and memory specifications, e.g. '-l walltime=00:10:00,mem=2gb'
+    :param session: The DRMAA session
+    :return:        The converted native specifications
+    """
+
+    jobmanager: str = session.drmaaImplementation
+
+    if '-l ' in specs and 'pbs' not in jobmanager.lower():
+
+        if 'slurm' in jobmanager.lower():
+            specs = (specs.replace('-l ', '')
+                          .replace(',', ' ')
+                          .replace('walltime', '--time')
+                          .replace('mem', '--mem')
+                          .replace('gb', '000'))
+        else:
+            LOGGER.warning(f"Default `--cluster` native specifications are not (yet) provided for {jobmanager}. Please add them to your command if you get DRMAA errors")
LOGGER.error(f"{len(failedjobs)} HPC jobs failed to run:\n{failedjobs}\nThis may well be due to an underspecified `--cluster` input option (e.g. not enough memory)")
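Downstream, the converted string is intended to be handed to DRMAA as a job's native specification. The following is a rough sketch with the drmaa package, assuming drmaa_nativespec() is importable and the scheduler is Slurm; the command and walltime/memory values are illustrative only:

import drmaa

with drmaa.Session() as session:                            # opens and closes the DRMAA session
    specs = drmaa_nativespec('-l walltime=00:10:00,mem=2gb', session)
    jt = session.createJobTemplate()
    jt.remoteCommand       = 'my_pipeline_step'             # hypothetical command
    jt.nativeSpecification = specs                          # e.g. '--time=00:10:00 --mem=2000' on Slurm
    jobid = session.runJob(jt)
    print(f"Submitted HPC job: {jobid}")
    session.deleteJobTemplate(jt)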