diff --git a/l2l/optimizers/crossentropy/optimizer.py b/l2l/optimizers/crossentropy/optimizer.py
index 92b19198..2147c116 100644
--- a/l2l/optimizers/crossentropy/optimizer.py
+++ b/l2l/optimizers/crossentropy/optimizer.py
@@ -171,7 +171,7 @@ def post_process(self, traj, fitnesses_results):
             # We need to convert the current run index into an ind_idx
             # (index of individual within one generation)
             traj.v_idx = run_index
-            ind_index = traj.par.ind_idx
+            ind_index = traj.parameters.ind_idx
 
             traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
             traj.f_add_result('$set.$.fitness', fitness)
diff --git a/l2l/optimizers/evolution/optimizer.py b/l2l/optimizers/evolution/optimizer.py
index 49ffaac3..54550874 100644
--- a/l2l/optimizers/evolution/optimizer.py
+++ b/l2l/optimizers/evolution/optimizer.py
@@ -128,7 +128,7 @@ def post_process(self, traj, fitnesses_results):
             # We need to convert the current run index into an ind_idx
             # (index of individual within one generation)
             traj.v_idx = run_index
-            ind_index = traj.par.ind_idx
+            ind_index = traj.parameters.ind_idx
 
             traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
             traj.f_add_result('$set.$.fitness', fitness)
diff --git a/l2l/optimizers/evolutionstrategies/optimizer.py b/l2l/optimizers/evolutionstrategies/optimizer.py
index 8606f1a3..309cdb9f 100644
--- a/l2l/optimizers/evolutionstrategies/optimizer.py
+++ b/l2l/optimizers/evolutionstrategies/optimizer.py
@@ -128,7 +128,7 @@ def __init__(self,
         noise_std_shape = np.array(parameters.noise_std).shape
         assert noise_std_shape == () or noise_std_shape == self.current_individual_arr.shape
 
-        traj.f_add_derived_parameter(
+        traj.f_add_parameter(
             'dimension', self.current_individual_arr.shape,
             comment='The dimension of the parameter space of the optimizee')
 
@@ -189,7 +189,7 @@ def post_process(self, traj, fitnesses_results):
             # We need to convert the current run index into an ind_idx
             # (index of individual within one generation)
             traj.v_idx = run_index
-            ind_index = traj.par.ind_idx
+            ind_index = traj.parameters.ind_idx
 
             traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
             traj.f_add_result('$set.$.fitness', fitness)
diff --git a/l2l/optimizers/face/optimizer.py b/l2l/optimizers/face/optimizer.py
index ce59ec16..233e57bd 100644
--- a/l2l/optimizers/face/optimizer.py
+++ b/l2l/optimizers/face/optimizer.py
@@ -107,10 +107,10 @@ def __init__(self, traj, optimizee_create_individual, optimizee_fitness_weights,
         traj.f_add_parameter('seed', np.uint32(parameters.seed),
                              comment='Random seed used by optimizer')
-        self.random_state = np.random.RandomState(seed=traj.par.seed)
+        self.random_state = np.random.RandomState(seed=traj.parameters.seed)
 
         temp_indiv, self.optimizee_individual_dict_spec = dict_to_list(self.optimizee_create_individual(),
                                                                        get_dict_spec=True)
-        traj.f_add_derived_parameter('dimension', len(temp_indiv),
+        traj.f_add_parameter('dimension', len(temp_indiv),
                              comment='The dimension of the parameter space of the optimizee')
 
         # Added a generation-wise parameter logging
@@ -167,7 +167,7 @@ def post_process(self, traj, fitnesses_results):
             # We need to convert the current run index into an ind_idx
             # (index of individual within one generation)
             traj.v_idx = run_index
-            ind_index = traj.par.ind_idx
+            ind_index = traj.parameters.ind_idx
 
             traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
             traj.f_add_result('$set.$.fitness', fitness)
diff --git a/l2l/optimizers/gradientdescent/optimizer.py b/l2l/optimizers/gradientdescent/optimizer.py
index f8abd2f3..edc0a6cf 100644
--- a/l2l/optimizers/gradientdescent/optimizer.py
+++ b/l2l/optimizers/gradientdescent/optimizer.py
@@ -119,7 +119,7 @@ def __init__(self, traj,
         traj.f_add_parameter('seed', np.uint32(parameters.seed), comment='Optimizer random seed')
 
         _, self.optimizee_individual_dict_spec = dict_to_list(self.optimizee_create_individual(), get_dict_spec=True)
-        self.random_state = np.random.RandomState(seed=traj.par.seed)
+        self.random_state = np.random.RandomState(seed=traj.parameters.seed)
 
         # Note that this array stores individuals as an np.array of floats as opposed to Individual-Dicts
         # This is because this array is used within the context of the gradient descent algorithm and
@@ -185,7 +185,7 @@ def post_process(self, traj, fitnesses_results):
             # We need to convert the current run index into an ind_idx
             # (index of individual within one generation
             traj.v_idx = run_index
-            ind_index = traj.par.ind_idx
+            ind_index = traj.parameters.ind_idx
 
             individual = old_eval_pop[ind_index]
diff --git a/l2l/optimizers/naturalevolutionstrategies/optimizer.py b/l2l/optimizers/naturalevolutionstrategies/optimizer.py
index 49af4743..e4e3a0f9 100644
--- a/l2l/optimizers/naturalevolutionstrategies/optimizer.py
+++ b/l2l/optimizers/naturalevolutionstrategies/optimizer.py
@@ -210,7 +210,7 @@ def post_process(self, traj, fitnesses_results):
             # We need to convert the current run index into an ind_idx
             # (index of individual within one generation)
             traj.v_idx = run_index
-            ind_index = traj.par.ind_idx
+            ind_index = traj.parameters.ind_idx
 
             traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
             traj.f_add_result('$set.$.fitness', fitness)
diff --git a/l2l/optimizers/paralleltempering/optimizer.py b/l2l/optimizers/paralleltempering/optimizer.py
index 25f94767..79b8d821 100644
--- a/l2l/optimizers/paralleltempering/optimizer.py
+++ b/l2l/optimizers/paralleltempering/optimizer.py
@@ -307,7 +307,7 @@ def post_process(self, traj, fitnesses_results):
             # We need to convert the current run index into an ind_idx
             # (index of individual within one generation)
             traj.v_idx = run_index
-            ind_index = traj.par.ind_idx
+            ind_index = traj.parameters.ind_idx
             individual = old_eval_pop[ind_index]
 
             # Accept or reject the new solution
diff --git a/l2l/optimizers/simulatedannealing/optimizer.py b/l2l/optimizers/simulatedannealing/optimizer.py
index 026a5fa2..c001c2cf 100644
--- a/l2l/optimizers/simulatedannealing/optimizer.py
+++ b/l2l/optimizers/simulatedannealing/optimizer.py
@@ -225,7 +225,7 @@ def post_process(self, traj, fitnesses_results):
             # We need to convert the current run index into an ind_idx
             # (index of individual within one generation)
             traj.v_idx = run_index
-            ind_index = traj.par.ind_idx
+            ind_index = traj.parameters.ind_idx
             individual = old_eval_pop[ind_index]
 
             # Accept or reject the new solution
diff --git a/l2l/tests/test_all.py b/l2l/tests/test_all.py
index 5dc4bfa9..30391463 100644
--- a/l2l/tests/test_all.py
+++ b/l2l/tests/test_all.py
@@ -1,30 +1,27 @@
 import unittest
-
-from . import test_ce_optimizer
-from . import test_ga_optimizer
-from . import test_sa_optimizer
-from . import test_gd_optimizer
-from . import test_innerloop
-from . import test_outerloop
-from . import test_setup
-
-
-def suite():
-
-    suite = unittest.TestSuite()
-    suite.addTest(test_setup.suite())
-    suite.addTest(test_outerloop.suite())
-    suite.addTest(test_innerloop.suite())
-    suite.addTest(test_ce_optimizer.suite())
-    suite.addTest(test_sa_optimizer.suite())
-    suite.addTest(test_gd_optimizer.suite())
-    suite.addTest(test_ga_optimizer.suite())
-
-    return suite
-
-
-if __name__ == "__main__":
-
-    runner = unittest.TextTestRunner(verbosity=2)
-    runner.run(suite())
\ No newline at end of file
+import l2l.tests.test_ga_optimizer as test_ga_optimizer
+import l2l.tests.test_sa_optimizer as test_sa_optimizer
+import l2l.tests.test_gd_optimizer as test_gd_optimizer
+import l2l.tests.test_gs_optimizer as test_gs_optimizer
+import l2l.tests.test_pt_optimizer as test_pt_optimizer
+import l2l.tests.test_face_optimizer as test_face_optimizer
+import l2l.tests.test_es_optimizer as test_es_optimizer
+import l2l.tests.test_setup as test_setup
+
+
+suite = unittest.TestSuite()
+loader = unittest.TestLoader()
+
+suite.addTests(test_setup.suite())
+suite.addTests(test_es_optimizer.suite())
+suite.addTests(test_sa_optimizer.suite())
+suite.addTests(test_gd_optimizer.suite())
+suite.addTests(test_ga_optimizer.suite())
+suite.addTests(test_gs_optimizer.suite())
+suite.addTests(test_face_optimizer.suite())
+suite.addTests(test_pt_optimizer.suite())
+suite.addTests(test_es_optimizer.suite())
+
+runner = unittest.TextTestRunner(verbosity=3)
+result = runner.run(suite)
\ No newline at end of file
diff --git a/l2l/tests/test_ce_optimizer.py b/l2l/tests/test_ce_optimizer.py
index 03a19342..7547ce4b 100644
--- a/l2l/tests/test_ce_optimizer.py
+++ b/l2l/tests/test_ce_optimizer.py
@@ -1,11 +1,12 @@
 import unittest
 import numpy as np
 
-from l2l.tests.test_optimizer import OptimizerTestCase
+from l2l.tests.test_optimizer import TestCaseOptimizer
 from l2l.optimizers.crossentropy.distribution import NoisyGaussian
 from l2l.optimizers.crossentropy import CrossEntropyOptimizer, CrossEntropyParameters
 
-class CEOptimizerTestCase(OptimizerTestCase):
+
+class CEOptimizerTestCase(TestCaseOptimizer):
 
     def test_setup(self):
 
diff --git a/l2l/tests/test_es_optimizer.py b/l2l/tests/test_es_optimizer.py
index c773e7b6..0cecf0ca 100644
--- a/l2l/tests/test_es_optimizer.py
+++ b/l2l/tests/test_es_optimizer.py
@@ -1,10 +1,11 @@
 import unittest
 import numpy as np
 
-from l2l.tests.test_optimizer import OptimizerTestCase
+from l2l.tests.test_optimizer import TestCaseOptimizer
 from l2l.optimizers.evolutionstrategies import EvolutionStrategiesParameters, EvolutionStrategiesOptimizer
 
-class ESOptimizerTestCase(OptimizerTestCase):
+
+class TestCaseESOptimizer(TestCaseOptimizer):
 
     def test_setup(self):
@@ -41,7 +42,7 @@ def test_setup(self):
 
 
 def suite():
-    suite = unittest.makeSuite(CEOptimizerTestCase, 'test')
+    suite = unittest.makeSuite(TestCaseESOptimizer, 'test')
     return suite
 
 
diff --git a/l2l/tests/test_face_optimizer.py b/l2l/tests/test_face_optimizer.py
index 10bf0e78..489e5e10 100644
--- a/l2l/tests/test_face_optimizer.py
+++ b/l2l/tests/test_face_optimizer.py
@@ -1,12 +1,12 @@
 import unittest
 
 import numpy as np
 
-from l2l.tests.test_optimizer import OptimizerTestCase
+from l2l.tests.test_optimizer import TestCaseOptimizer
 from l2l.optimizers.crossentropy.distribution import Gaussian
 from l2l.optimizers.face import FACEOptimizer, FACEParameters
 
 
-class FACEOptimizerTestCase(OptimizerTestCase):
+class FACEOptimizerTestCase(TestCaseOptimizer):
 
     def test_setup(self):
diff --git a/l2l/tests/test_ga_optimizer.py b/l2l/tests/test_ga_optimizer.py
index 7c6af208..add29513 100644
--- a/l2l/tests/test_ga_optimizer.py
+++ b/l2l/tests/test_ga_optimizer.py
@@ -1,10 +1,10 @@
 import unittest
 
-from l2l.tests.test_optimizer import OptimizerTestCase
+from l2l.tests.test_optimizer import TestCaseOptimizer
 from l2l.optimizers.evolution import GeneticAlgorithmOptimizer, GeneticAlgorithmParameters
 
 
-class GAOptimizerTestCase(OptimizerTestCase):
+class GAOptimizerTestCase(TestCaseOptimizer):
 
     def test_setup(self):
 
diff --git a/l2l/tests/test_gd_optimizer.py b/l2l/tests/test_gd_optimizer.py
index 62f532d8..390d1f83 100644
--- a/l2l/tests/test_gd_optimizer.py
+++ b/l2l/tests/test_gd_optimizer.py
@@ -3,12 +3,12 @@
 import numpy as np
 
 from l2l.optimizers.gradientdescent.optimizer import GradientDescentOptimizer
 from l2l.optimizers.gradientdescent.optimizer import RMSPropParameters
-from l2l.tests.test_optimizer import OptimizerTestCase
+from l2l.tests.test_optimizer import TestCaseOptimizer
 from l2l import list_to_dict
 
 
-class GDOptimizerTestCase(OptimizerTestCase):
+class GDOptimizerTestCase(TestCaseOptimizer):
 
     def test_gd(self):
         optimizer_parameters = RMSPropParameters(learning_rate=0.01, exploration_step_size=0.01,
diff --git a/l2l/tests/test_gs_optimizer.py b/l2l/tests/test_gs_optimizer.py
index f0c018a5..8dee16e1 100644
--- a/l2l/tests/test_gs_optimizer.py
+++ b/l2l/tests/test_gs_optimizer.py
@@ -1,13 +1,13 @@
 import unittest
 
-from l2l.tests.test_optimizer import OptimizerTestCase
+from l2l.tests.test_optimizer import TestCaseOptimizer
 
 from l2l.optimizers.gridsearch import GridSearchOptimizer, GridSearchParameters
 from l2l import list_to_dict
 
 
-class GSOptimizerTestCase(OptimizerTestCase):
+class GSOptimizerTestCase(TestCaseOptimizer):
 
     def test_gd(self):
         n_grid_divs_per_axis = 2
diff --git a/l2l/tests/test_optimizer.py b/l2l/tests/test_optimizer.py
index bb90755a..1e2a19e6 100644
--- a/l2l/tests/test_optimizer.py
+++ b/l2l/tests/test_optimizer.py
@@ -7,7 +7,7 @@
 from collections import namedtuple
 
 
-class OptimizerTestCase(unittest.TestCase):
+class TestCaseOptimizer(unittest.TestCase):
 
     def setUp(self):
         # Test function
@@ -23,3 +23,16 @@ def setUp(self):
             jube_parameter=jube_params)
         self.optimizee_parameters = namedtuple('OptimizeeParameters', [])
         self.optimizee = FunctionGeneratorOptimizee(self.trajectory,benchmark_function, seed=1)
+
+def suite():
+    suite = unittest.makeSuite(TestCaseOptimizer, 'test')
+    return suite
+
+
+def run():
+    runner = unittest.TextTestRunner(verbosity=2)
+    runner.run(suite())
+
+
+if __name__ == "__main__":
+    run()
\ No newline at end of file
diff --git a/l2l/tests/test_pt_optimizer.py b/l2l/tests/test_pt_optimizer.py
index d7073ce9..c421d7f2 100644
--- a/l2l/tests/test_pt_optimizer.py
+++ b/l2l/tests/test_pt_optimizer.py
@@ -1,12 +1,12 @@
 import unittest
 
-from l2l.tests.test_optimizer import OptimizerTestCase
+from l2l.tests.test_optimizer import TestCaseOptimizer
 import numpy as np
 
 from l2l.optimizers.paralleltempering.optimizer import AvailableCoolingSchedules
 from l2l.optimizers.paralleltempering.optimizer import ParallelTemperingParameters, ParallelTemperingOptimizer
 
-class PTOptimizerTestCase(OptimizerTestCase):
+class PTOptimizerTestCase(TestCaseOptimizer):
 
     def test_sa(self):
         cooling_schedules = [AvailableCoolingSchedules.EXPONENTIAL_ADDAPTIVE,
diff --git a/l2l/tests/test_sa_optimizer.py b/l2l/tests/test_sa_optimizer.py
index 9cb7a77e..b140a783 100644
--- a/l2l/tests/test_sa_optimizer.py
+++ b/l2l/tests/test_sa_optimizer.py
@@ -1,11 +1,11 @@
 import unittest
 
-from l2l.tests.test_optimizer import OptimizerTestCase
+from l2l.tests.test_optimizer import TestCaseOptimizer
 import numpy as np
 
 from l2l.optimizers.simulatedannealing.optimizer import SimulatedAnnealingParameters, SimulatedAnnealingOptimizer, AvailableCoolingSchedules
 
-class SAOptimizerTestCase(OptimizerTestCase):
+class SAOptimizerTestCase(TestCaseOptimizer):
 
     def test_sa(self):
         optimizer_parameters = SimulatedAnnealingParameters(n_parallel_runs=1, noisy_step=.03, temp_decay=.99, n_iteration=1,
diff --git a/l2l/tests/test_setup.py b/l2l/tests/test_setup.py
index 38dd92f3..ad2b5a00 100644
--- a/l2l/tests/test_setup.py
+++ b/l2l/tests/test_setup.py
@@ -11,16 +11,11 @@
 import os
 
 
-class SetupTestCase(unittest.TestCase):
+class TestCaseSetup(unittest.TestCase):
 
     def setUp(self):
         self.name = "test_trajectory"
-        try:
-            with open('../../bin/path.conf') as f:
-                root_dir_path = f.read().strip()
-        except FileNotFoundError:
-            self.fail("L2L is not well configured. Missing path file.")
-        self.paths = Paths(self.name, dict(run_num='test'), root_dir_path=root_dir_path, suffix="-" + self.name)
+        self.paths = Paths(self.name, dict(run_num='test'), root_dir_path=".", suffix="-" + self.name)
 
     def test_paths(self):
         self.assertIsNotNone(self.paths)
@@ -105,7 +100,7 @@ def test_juberunner_setup(self):
 
 
 def suite():
-    suite = unittest.makeSuite(SetupTestCase, 'test')
+    suite = unittest.makeSuite(TestCaseSetup, 'test')
     return suite
 
diff --git a/l2l/utils/JUBE_runner.py b/l2l/utils/JUBE_runner.py
index 8ea0f3cf..526fa808 100644
--- a/l2l/utils/JUBE_runner.py
+++ b/l2l/utils/JUBE_runner.py
@@ -24,7 +24,7 @@ def __init__(self, trajectory):
         """
         self.trajectory = trajectory
         self.done = False
-        if 'JUBE_params' not in self.trajectory.par.keys():
+        if 'JUBE_params' not in self.trajectory.parameters.keys():
             raise KeyError("The trajectory must contain the parameter group JUBE_params")
 
         args = self.trajectory.parameters["JUBE_params"].params
diff --git a/l2l/utils/environment.py b/l2l/utils/environment.py
index 1d3be336..ecc14533 100644
--- a/l2l/utils/environment.py
+++ b/l2l/utils/environment.py
@@ -38,10 +38,10 @@ def run(self, runfunc):
         :return: the results of running a whole generation. Dictionary indexed by generation id.
         """
         result = {}
-        for it in range(self.trajectory.par['n_iteration']):
+        for it in range(self.trajectory.parameters['n_iteration']):
             if self.multiprocessing:
                 # Multiprocessing is done through JUBE, either with or without scheduler
-                logging.info("Environment run starting JUBERunner for n iterations: " + str(self.trajectory.par['n_iteration']))
+                logging.info("Environment run starting JUBERunner for n iterations: " + str(self.trajectory.parameters['n_iteration']))
                 jube = JUBERunner(self.trajectory)
                 result[it] = []
                 # Initialize new JUBE run and execute it
@@ -67,7 +67,7 @@ def run(self, runfunc):
                     logger.exception("Error during serial execution of individuals")
                     raise
             # Add results to the trajectory
-            self.trajectory.results.f_add_result_to_group("all_results", it, result[it])
+            self.trajectory.results.f_add_result(it, result[it])
             self.trajectory.current_results = result[it]
             # Perform the postprocessing step in order to generate the new parameter set
             self.postprocessing(self.trajectory, result[it])
diff --git a/l2l/utils/trajectory.py b/l2l/utils/trajectory.py
index dd6068be..64034354 100644
--- a/l2l/utils/trajectory.py
+++ b/l2l/utils/trajectory.py
@@ -16,19 +16,14 @@ class Trajectory:
     def __init__(self, **keyword_args):
         """
        Initializes the trajectory. Some parameters are kept to match the interface with the pypet trajectory.
-        TODO: remove all irrelevant attributes and simplify the class
         """
         if 'name' in keyword_args:
             self._name = keyword_args['name']
         self._timestamp = time.time()
-        self._parameters = ParameterDict(self)  # Contains all parameters
-        self._results = {}  # Contains all results
-        self.individual = Individual()
+        self.parameters = ParameterDict(self)  # Contains all parameters
         self.results = ResultGroup()
-        self.results.f_add_result_group('all_results', "Contains all the results")
         self.current_results = {}
-        self._parameters.parameter_group = {}
-        self._parameters.parameter = {}
+        self.individual = Individual()
         self.individuals = {}
         self.v_idx = 0
 
@@ -38,7 +33,7 @@ def f_add_parameter_group(self, name, comment=""):
         :param name: name of the new parameter group
         :param comment: ignored for the moment. Kept to match pypet interface.
         """
-        self._parameters[name] = ParameterGroup()
+        self.parameters[name] = ParameterGroup()
         logging.info("Added new parameter group: " + name)
 
     def f_add_parameter_to_group(self, group_name, key, val):
@@ -51,8 +46,8 @@
 
         Throws an exception if the group does not exist
         """
-        if group_name in self._parameters.keys():
-            self._parameters[group_name].f_add_parameter(key, val)
+        if group_name in self.parameters.keys():
+            self.parameters[group_name].f_add_parameter(key, val)
         else:
             # LOG("Key not found when adding to result group")
             raise Exception("Group name not found when adding value to result group")
@@ -62,12 +57,8 @@ def f_add_result(self,key, val, comment=""):
         """
         Adds a result to the trajectory
         :param key: it identifies either a generation params result group or another result
         :param val: The value to be added to the results
-        TODO: verify where is the generation_params call performed
         """
-        if key == 'generation_params':
-            self.results[key] = ResultGroup()
-        else:
-            self._results[key] = val
+        self.results[key] = val
 
     def f_add_parameter(self, key, val, comment=""):
         """
@@ -76,16 +67,7 @@
         Adds a parameter to the trajectory. Match the previous pypet interface.
         :param key: Name of the parameter
         :param val: Value of the parameter
         :param comment
         """
-        self._parameters[key] = val
-
-    def f_add_derived_parameter(self, key, val, comment=""):
-        """
-        Adds a derived parameter to the trajectory. Match the previous pypet interface.
-        :param key: Name of the parameter
-        :param val: Value of the parameter
-        :param comment:
-        """
-        self.f_add_parameter(key,val,comment)
+        self.parameters[key] = val
 
     def f_expand(self, build_dict, fail_safe=True):
         """
@@ -117,7 +99,7 @@
         logging.info("Expanded trajectory for generation: " + str(generation))
 
     def __str__(self):
-        return str(self._parameters)
+        return str(self.parameters)
 
     def __getattr__(self, attr):
         """
@@ -128,20 +110,19 @@
         if '.' in attr:
             # This is triggered exclusively in the case where __getattr__ is called from __getitem__
             attrs = attr.split('.')
-            ret = self._parameters.get(attrs[0])
+            ret = self.parameters.get(attrs[0])
             for at in attrs[1:]:
                 ret = ret[at]
-        elif attr == 'par' or attr == 'parameters':
-            ret = self._parameters
+        elif attr == 'parameters':
+            ret = self.parameters
         else:
-            ret = self._parameters.get(attr,default_value=None)
+            ret = self.parameters.get(attr,default_value=None)
         return ret
 
     def __getitem__(self, key):
         return self.__getattr__(key)
 
     def __getstate__(self):
-        # print(self.__dict__)
         return self.__dict__
 
     def __setstate__(self, d):