Skip to content

Commit 2653393

Browse files
authored
Merge pull request #872 from suny-downstate-medical-center/development
PR from development to master - VERSION 1.1.1
2 parents 6a3d2f5 + d79cb57 commit 2653393

File tree

7 files changed

+85
-89
lines changed

7 files changed

+85
-89
lines changed

CHANGES.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,11 @@
1+
# Version 1.1.1
2+
3+
**Bug fixes**
4+
5+
- Fixed ignored synMech[Weight/Delay/Loc]Factor
6+
- Fixed issue with SSH based searches
7+
- Fixed issue with data and trial logging file creation
8+
19
# Version 1.1.0
210

311
**New features**
@@ -10,6 +18,8 @@
1018

1119
- Updated options for batch communication (see netpyne documentation: "running a batch job (beta)")
1220

21+
- new batchtools subpackage includes data and trial logging features.
22+
1323
**Bug fixes**
1424

1525
- Solve minor bug in the uniform field for extracellular stimulation (sign). Deposited the xtra.mod in support module.

doc/source/conf.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,9 +67,9 @@
6767
# built documents.
6868
#
6969
# The short X.Y version.
70-
version = '1.1.0'
70+
version = '1.1.1'
7171
# The full version, including alpha/beta/rc tags.
72-
release = '1.1.0'
72+
release = '1.1.1'
7373

7474
# The language for content autogenerated by Sphinx. Refer to documentation
7575
# for a list of supported languages.

doc/source/user_documentation.rst

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2997,6 +2997,8 @@ Batch job handling is implemented from ``netpyne.batchtools.search``. Below is a
29972997
mode: Optional[str] = "min", # either 'min' or 'max' (whether to minimize or maximize the metric)
29982998
sample_interval: Optional[int] = 15, # interval to poll for new results (in seconds)
29992999
attempt_restore: Optional[bool] = True, # whether to attempt to restore from a checkpoint
3000+
file_cleanup: Optional[bool] = True, # whether to clean up accessory files after the search is completed
3001+
advanced_logging: Optional[bool|str] = True, # advanced logging including generation of a batch .log file and an sqlite .db file (will be created in a timestamped directory by default).
30003002
) -> study: # results of the search
30013003
30023004
The default parameter search implemented with the ``search`` function uses ``ray.tune`` as the search algorithm backend, creates a `.csv` storing the results, and returns a ``study`` object containing the output. It takes the following two parameters:

netpyne/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
NetPyNE consists of a number of sub-packages and modules.
55
"""
66

7-
__version__ = '1.1.0'
7+
__version__ = '1.1.1'
88
import os, sys
99

1010
display = os.getenv('DISPLAY')

netpyne/batchtools/examples/CA3/optuna_search.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,9 @@
2929
comm_type = 'socket', # 'socket', 'sfs', None
3030
label = 'optuna',
3131
params = params,
32-
# output_path = './batch', # defaults to ./batch
33-
# checkpoint_path = './checkpoint', # defaults to ./checkpoint
32+
output_path = './batch',
33+
checkpoint_path = './checkpoint',
34+
advanced_logging = True,
3435
run_config = run_config,
3536
num_samples = 27,
3637
metric = 'loss',

netpyne/batchtools/search.py

Lines changed: 59 additions & 70 deletions
Original file line numberDiff line numberDiff line change
@@ -7,12 +7,14 @@
77
from ray.tune.search import create_searcher, ConcurrencyLimiter, SEARCH_ALG_IMPORT
88
from netpyne.batchtools import runtk
99
from collections import namedtuple
10-
from batchtk.raytk.search import ray_trial, LABEL_POINTER
11-
from batchtk.utils import get_path
10+
from batchtk.utils import get_path, SQLiteStorage, ScriptLogger
1211
from io import StringIO
1312
import numpy
1413
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
1514
from netpyne.batchtools import submits
15+
from batchtk import runtk
16+
from batchtk.runtk.trial import trial, LABEL_POINTER
17+
import datetime
1618
#import signal #incompatible with signal and threading from ray
1719
#import threading
1820

@@ -84,62 +86,8 @@ def ray_optuna_search(dispatcher_constructor: Callable, # constructor for the di
8486
-------
8587
Study: namedtuple('Study', ['algo', 'results'])(algo, results), # named tuple containing the created algorithm and the results of the search
8688
"""
87-
from ray.tune.search.optuna import OptunaSearch
88-
89-
if ray_config is None:
90-
ray_config = {}
91-
ray_init_kwargs = ray_config#{"runtime_env": {"working_dir:": "."}} | ray_config # do not actually need to specify a working dir, can
92-
ray.init(**ray_init_kwargs)# TODO needed for python import statements ?
93-
if optuna_config == None:
94-
optuna_config = {}
95-
96-
storage_path = get_path(checkpoint_path)
97-
algo = ConcurrencyLimiter(searcher=OptunaSearch(metric=metric, mode=mode, **optuna_config),
98-
max_concurrent=max_concurrent,
99-
batch=batch) #TODO does max_concurrent and batch work?
100-
101-
#submit = submit_constructor()
102-
#submit.update_templates(
103-
# **run_config
104-
#)
105-
project_path = os.getcwd()
106-
107-
def run(config):
108-
config.update({'saveFolder': output_path, 'simLabel': LABEL_POINTER})
109-
data = ray_trial(config=config, label=label, dispatcher_constructor=dispatcher_constructor,
110-
project_path=project_path, output_path=output_path, submit_constructor=submit_constructor,
111-
submit_kwargs=run_config, log=None)
112-
if isinstance(metric, str):#TODO only Optuna supports multiobjective?
113-
metrics = {'config': config, 'data': data, metric: data[metric]}
114-
session.report(metrics)
115-
elif isinstance(metric, (list, tuple)):
116-
metrics = {k: data[k] for k in metric}
117-
metrics['config'] = config
118-
metrics['data'] = data
119-
session.report(metrics)
120-
else:
121-
raise ValueError("metric must be a string or a list/tuple of strings")
122-
tuner = tune.Tuner(
123-
run,
124-
tune_config=tune.TuneConfig(
125-
search_alg=algo,
126-
num_samples=num_samples,
127-
),
128-
run_config=RunConfig(
129-
storage_path=storage_path,
130-
name=label,
131-
),
132-
param_space=params,
133-
)
134-
135-
results = tuner.fit()
136-
resultsdf = results.get_dataframe()
137-
resultsdf.to_csv("{}.csv".format(label))
138-
#return namedtuple('Study', ['algo', 'results'])(algo, results)
139-
if clean_checkpoint:
140-
os.system("rm -r {}".format(storage_path))
141-
return namedtuple('Study', ['algo', 'results'])(algo.searcher._ot_study, results)
142-
89+
from warnings import warn
90+
warn("ray_optuna_search is deprecated, please use ray_search with algorithm='optuna' instead", DeprecationWarning)
14391
"""
14492
Parameters
14593
:
@@ -183,7 +131,7 @@ def ray_search(dispatcher_constructor: Callable, # constructor for the dispatche
183131
output_path: Optional[str] = './batch', # directory for storing generated files
184132
checkpoint_path: Optional[str] = './checkpoint', # directory for storing checkpoint files
185133
max_concurrent: Optional[int] = 1, # number of concurrent trials to run at one time
186-
batch: Optional[bool] = True, # whether concurrent trials should run synchronously or asynchronously
134+
batch: Optional[bool] = True, # whether concurrent trials should run synchronously or asynchronously
187135
num_samples: Optional[int] = 1, # number of trials to run
188136
metric: Optional[str] = None, # metric to optimize, if not supplied, no data will be collated.
189137
mode: Optional[str] = "min", # either 'min' or 'max' (whether to minimize or maximize the metric
@@ -196,18 +144,23 @@ def ray_search(dispatcher_constructor: Callable, # constructor for the dispatche
196144
prune_metadata = True, # whether to prune the metadata from the results.csv
197145
remote_dir: Optional[str] = None, # absolute path for directory to run the search on (for submissions over SSH)
198146
host: Optional[str] = None, # host to run the search on
199-
key: Optional[str] = None # key for TOTP generator...
147+
key: Optional[str] = None, # key for TOTP generator...
148+
file_cleanup: Optional[bool|list|tuple] = True, # whether to clean up accessory files after the search is completed
149+
advanced_logging: Optional[bool|str] = True,
200150
) -> study:
201151

202152
expected_total = params.pop('_expected_trials_per_sample') * num_samples
203153
if (dispatcher_constructor == runtk.dispatchers.SSHDispatcher) or \
204154
(dispatcher_constructor == SSHGridDispatcher):
205-
if submit_constructor == submits.SGESubmitSFS:
155+
dispatcher_kwargs = None
156+
if submit_constructor == submits.SGESubmitSSH:
206157
from fabric import connection
207158
dispatcher_kwargs = {'connection': connection.Connection(host)}
208159
if submit_constructor == submits.SlurmSubmitSSH:
209160
from batchtk.utils import TOTPConnection
210161
dispatcher_kwargs = {'connection': TOTPConnection(host, key)}
162+
if dispatcher_kwargs == None:
163+
raise ValueError("for SSH based methods, please provide either 'sftp' or None as the comm_type")
211164
else:
212165
dispatcher_kwargs = {}
213166
if ray_config is None:
@@ -233,6 +186,18 @@ def ray_search(dispatcher_constructor: Callable, # constructor for the dispatche
233186
#TODO class this object for self calls? cleaner? vs nested functions
234187
#TODO clean up working_dir and excludes
235188
storage_path = get_path(checkpoint_path)
189+
adv_path = None
190+
timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
191+
if advanced_logging:
192+
if advanced_logging is True:
193+
advanced_logging = "./" #follows from os.getcwd()
194+
adv_path = get_path("{}/run_{}".format(advanced_logging, timestamp))
195+
if isinstance(advanced_logging, str):
196+
adv_path = get_path(advanced_logging)
197+
os.makedirs(adv_path, exist_ok=True)
198+
199+
if file_cleanup is True:
200+
file_cleanup = (runtk.SGLOUT, runtk.MSGOUT)
236201
load_path = "{}/{}".format(storage_path, label)
237202
algo = create_searcher(algorithm, **algorithm_config) #concurrency may not be accepted by all algo
238203
#search_alg – The search algorithm to use.
@@ -248,13 +213,29 @@ def ray_search(dispatcher_constructor: Callable, # constructor for the dispatche
248213
#submit.update_templates(
249214
# **run_config
250215
#)
216+
def ray_trial(config, label, dispatcher_constructor, project_path, output_path, submit_constructor,
217+
dispatcher_kwargs=None, submit_kwargs=None, interval=60, data_storage=None, debug_log=None,
218+
report=('path', 'config', 'data'), cleanup=(runtk.SGLOUT, runtk.MSGOUT), check_storage=False):
219+
debug_log, data_storage = None, None
220+
if adv_path:
221+
debug_log = ScriptLogger(file_out="{}/trials.log".format(adv_path))
222+
data_storage = SQLiteStorage(label='trials', path=adv_path, entries=('path', 'config', 'data'))
223+
tid = tune.get_context().get_trial_id()
224+
tid = tid.split('_')[-1] # value for trial (can be int/string)
225+
return trial(
226+
config=config, label=label, tid=tid, dispatcher_constructor=dispatcher_constructor,
227+
project_path=project_path, output_path=output_path, submit_constructor=submit_constructor,
228+
dispatcher_kwargs=dispatcher_kwargs, submit_kwargs=submit_kwargs, interval=interval,
229+
data_storage=data_storage, debug_log=debug_log, report=report, cleanup=cleanup, check_storage=check_storage)
230+
251231
project_path = remote_dir or os.getcwd() # if remote_dir is None, then use the current working directory
252232
def run(config):
253233
config.update({'saveFolder': output_path, 'simLabel': LABEL_POINTER})
254234
data = ray_trial(config=config, label=label, dispatcher_constructor=dispatcher_constructor,
255235
project_path=project_path, output_path=output_path, submit_constructor=submit_constructor,
256236
dispatcher_kwargs=dispatcher_kwargs, submit_kwargs=run_config,
257-
interval=sample_interval, log=None, report=report_config)
237+
interval=sample_interval, report=report_config,
238+
cleanup=file_cleanup, check_storage=False)
258239
if metric is None:
259240
metrics = {'data': data, '_none_placeholder': 0} #TODO, should include 'config' now with purge_metadata?
260241
session.report(metrics)
@@ -411,7 +392,9 @@ def shim(dispatcher_constructor: Optional[Callable] = None, # constructor for th
411392
prune_metadata: Optional[bool] = True, # whether to prune the metadata from the results.csv
412393
remote_dir: Optional[str] = None, # absolute path for directory to run the search on (for submissions over SSH)
413394
host: Optional[str] = None, # host to run the search on
414-
key: Optional[str] = None # key for TOTP generator...
395+
key: Optional[str] = None, # key for TOTP generator...
396+
file_cleanup: Optional[bool] = True, # whether to clean up accessory files after the search is completed
397+
advanced_logging: Optional[bool|str] = True,
415398
) -> Dict:
416399
kwargs = locals()
417400
if metric is None and algorithm not in ['variant_generator', 'random', 'grid']:
@@ -464,7 +447,9 @@ def search(dispatcher_constructor: Optional[Callable] = None, # constructor for
464447
prune_metadata: Optional[bool] = True, # whether to prune the metadata from the results.csv
465448
remote_dir: Optional[str] = None, # absolute path for directory to run the search on (for submissions over SSH)
466449
host: Optional[str] = None, # host to run the search on
467-
key: Optional[str] = None # key for TOTP generator.
450+
key: Optional[str] = None, # key for TOTP generator.
451+
file_cleanup: Optional[bool] = True, # whether to clean up accessory files after the search is completed
452+
advanced_logging: Optional[bool|str] = True,
468453
) -> study: # results of the search -> study.results (raw tune.ResultGrid), study.data (pandas.DataFrame conversion)
469454
"""
470455
search(...)
@@ -495,6 +480,10 @@ def search(dispatcher_constructor: Optional[Callable] = None, # constructor for
495480
remote_dir: Optional[str] = None, # absolute path for directory to run the search on (for submissions over SSH)
496481
host: Optional[str] = None, # host to run the search on (for submissions over SSH)
497482
key: Optional[str] = None # key for TOTP generator (for submissions over SSH)
483+
file_cleanup: Optional[bool] = True, # whether to clean up accessory files after the search is completed
484+
advanced_logging: Optional[bool] = True, # enables advanced logging features, checkpoint_db and log_file.
485+
checkpoint_db: Optional[str] = None, # path for checkpoint db file.
486+
log_file: Optional[str] = None, # path for the log file
498487
Creates (upon completed fitting run...)
499488
-------
500489
<label>.csv: file containing the results of the search
@@ -514,20 +503,20 @@ def search(dispatcher_constructor: Optional[Callable] = None, # constructor for
514503
"""
515504
SEE:
516505
'variant_generator'
517-
'random' -> points to variant_generator
506+
'random' <- deprecated -> points to variant_generator
518507
'ax'
519-
'dragonfly'
520-
'skopt'
508+
'dragonfly' <- deprecated
509+
'skopt' <- deprecated
521510
'hyperopt'
522511
'bayesopt'
523512
'bohb'
524513
'nevergrad'
525514
'optuna'
526515
'zoopt'
527-
'sigopt'
516+
'sigopt' <- deprecated
528517
'hebo'
529-
'blendsearch'
530-
'cfo'
518+
'blendsearch' <- deprecated
519+
'cfo' <- deprecated
531520
"""
532521

533522

netpyne/network/conn.py

Lines changed: 8 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -949,15 +949,8 @@ def _addCellConn(self, connParam, preCellGid, postCellGid, preCellsTags={}):
949949

950950
for i, synMech in enumerate(synMechs):
951951

952-
# weight, delay and loc (and also sec - if corresp. flag is set) are either single value or list of values. If single value, use it for all synMechs. If it is a list, use the value at index i
953-
if connParam.get('distinctSecsPerSynMech', False):
954-
paramNames = ['weight', 'delay', 'loc', 'synsPerConn', 'sec']
955-
else:
956-
paramNames = ['weight', 'delay', 'loc', 'synsPerConn']
957-
# keep sec as is to be handled later in CompartCell.addConn()
958-
sec = connParam.get('sec')
959-
960-
for param in paramNames:
952+
# synsPerConn, weight, delay and loc are either single value or list of values. If single value, use it for all synMechs. If it is a list, use the value at index i
953+
for param in ['weight', 'delay', 'loc', 'synsPerConn']:
961954
if numSynMechs == 1:
962955
finalParamVal = finalParam.get(param)
963956
else:
@@ -966,10 +959,11 @@ def _addCellConn(self, connParam, preCellGid, postCellGid, preCellsTags={}):
966959
_ensure(len(finalParam[param]) == numSynMechs, connParam['label'], f"{param} should be {numSynMechs}-element list or a single value")
967960
finalParamVal = finalParam[param][i]
968961

969-
elif (f'synMech{param}Factor' in connParam) and (param is not 'sec'): # adapt weight/delay/loc for each synMech
970-
factors = connParam[f'synMech{param}Factor']
971-
_ensure(len(factors) == numSynMechs, connParam['label'], f"{f'synMech{param}Factor'} should be {numSynMechs}-element list")
972-
_ensure((type(finalParam[param]) in int, float), connParam['label'], f"{params} should be list of numbers")
962+
elif (param in ['weight', 'delay', 'loc']) and (synMechFactorParam := f'synMech{param.capitalize()}Factor') in connParam:
963+
# adapt weight/delay/loc for each synMech (e.g. 'synMechWeightFactor')
964+
factors = connParam[synMechFactorParam]
965+
_ensure(len(factors) == numSynMechs, connParam['label'], f"{synMechFactorParam} should be {numSynMechs}-element list")
966+
_ensure(isinstance(finalParam[param], Number), connParam['label'], f"'{param}' should be numeric")
973967
finalParamVal = finalParam[param] * factors[i]
974968

975969
else:
@@ -979,7 +973,7 @@ def _addCellConn(self, connParam, preCellGid, postCellGid, preCellsTags={}):
979973

980974
params = {
981975
'preGid': preCellGid,
982-
'sec': sec, # TODO: will not work with `distinctSecsPerSynMech`?
976+
'sec': connParam.get('sec'), # keep sec as is to be handled later in CompartCell.addConn()
983977
'loc': finalParam['locSynMech'],
984978
'synMech': synMech,
985979
'weight': finalParam['weightSynMech'],

0 commit comments

Comments
 (0)