2 changes: 1 addition & 1 deletion l2l/optimizers/crossentropy/optimizer.py
@@ -171,7 +171,7 @@ def post_process(self, traj, fitnesses_results):
# We need to convert the current run index into an ind_idx
# (index of individual within one generation)
traj.v_idx = run_index
- ind_index = traj.par.ind_idx
+ ind_index = traj.parameters.ind_idx

traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
traj.f_add_result('$set.$.fitness', fitness)
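
The same traj.par -> traj.parameters rename recurs in every optimizer's post_process below. For reference, a minimal sketch of the resulting bookkeeping, written as a free function around the lines shown in this hunk (traj is assumed to be an l2l trajectory exposing parameters.ind_idx, v_idx and f_add_result exactly as above; nothing beyond those calls is verified):

def record_generation_results(traj, fitnesses_results, eval_pop):
    # Sketch only: mirrors the post_process fragment above.
    for run_index, fitness in fitnesses_results:
        # Convert the current run index into an index within this generation.
        traj.v_idx = run_index
        ind_index = traj.parameters.ind_idx  # new spelling; previously traj.par.ind_idx
        traj.f_add_result('$set.$.individual', eval_pop[ind_index])
        traj.f_add_result('$set.$.fitness', fitness)
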
2 changes: 1 addition & 1 deletion l2l/optimizers/evolution/optimizer.py
@@ -128,7 +128,7 @@ def post_process(self, traj, fitnesses_results):
# We need to convert the current run index into an ind_idx
# (index of individual within one generation)
traj.v_idx = run_index
- ind_index = traj.par.ind_idx
+ ind_index = traj.parameters.ind_idx

traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
traj.f_add_result('$set.$.fitness', fitness)
4 changes: 2 additions & 2 deletions l2l/optimizers/evolutionstrategies/optimizer.py
@@ -128,7 +128,7 @@ def __init__(self,
noise_std_shape = np.array(parameters.noise_std).shape
assert noise_std_shape == () or noise_std_shape == self.current_individual_arr.shape

- traj.f_add_derived_parameter(
+ traj.f_add_parameter(
'dimension',
self.current_individual_arr.shape,
comment='The dimension of the parameter space of the optimizee')

@@ -189,7 +189,7 @@ def post_process(self, traj, fitnesses_results):
# We need to convert the current run index into an ind_idx
# (index of individual within one generation)
traj.v_idx = run_index
- ind_index = traj.par.ind_idx
+ ind_index = traj.parameters.ind_idx
Review comment (Member), on the line above: Not sure if it is too much to do, but maybe having individual_index instead of ind_idx would be much clearer.

traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
traj.f_add_result('$set.$.fitness', fitness)
6 changes: 3 additions & 3 deletions l2l/optimizers/face/optimizer.py
@@ -107,10 +107,10 @@ def __init__(self, traj, optimizee_create_individual, optimizee_fitness_weights,
traj.f_add_parameter('seed', np.uint32(parameters.seed),
comment='Random seed used by optimizer')

- self.random_state = np.random.RandomState(seed=traj.par.seed)
+ self.random_state = np.random.RandomState(seed=traj.parameters.seed)
temp_indiv, self.optimizee_individual_dict_spec = dict_to_list(self.optimizee_create_individual(),
get_dict_spec=True)
- traj.f_add_derived_parameter('dimension', len(temp_indiv),
+ traj.f_add_parameter('dimension', len(temp_indiv),
comment='The dimension of the parameter space of the optimizee')

# Added a generation-wise parameter logging
@@ -167,7 +167,7 @@ def post_process(self, traj, fitnesses_results):
# We need to convert the current run index into an ind_idx
# (index of individual within one generation)
traj.v_idx = run_index
- ind_index = traj.par.ind_idx
+ ind_index = traj.parameters.ind_idx

traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
traj.f_add_result('$set.$.fitness', fitness)
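
Alongside the seed access, this hunk also switches the dimension bookkeeping from f_add_derived_parameter to f_add_parameter. A hedged sketch of the affected __init__ fragment, limited to the calls visible above (dict_to_list is the l2l helper already used in this module and is assumed to be in scope; the function name register_basic_parameters is invented for illustration):

import numpy as np

def register_basic_parameters(self, traj, parameters):
    # Sketch only: mirrors the FACE/GD __init__ fragments in this PR.
    traj.f_add_parameter('seed', np.uint32(parameters.seed),
                         comment='Random seed used by optimizer')
    # Seed the optimizer's RNG from the registered parameter (was traj.par.seed).
    self.random_state = np.random.RandomState(seed=traj.parameters.seed)
    temp_indiv, self.optimizee_individual_dict_spec = dict_to_list(
        self.optimizee_create_individual(), get_dict_spec=True)
    # 'dimension' is now registered as a plain parameter rather than a derived one.
    traj.f_add_parameter('dimension', len(temp_indiv),
                         comment='The dimension of the parameter space of the optimizee')
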
4 changes: 2 additions & 2 deletions l2l/optimizers/gradientdescent/optimizer.py
@@ -119,7 +119,7 @@ def __init__(self, traj,
traj.f_add_parameter('seed', np.uint32(parameters.seed), comment='Optimizer random seed')

_, self.optimizee_individual_dict_spec = dict_to_list(self.optimizee_create_individual(), get_dict_spec=True)
- self.random_state = np.random.RandomState(seed=traj.par.seed)
+ self.random_state = np.random.RandomState(seed=traj.parameters.seed)

# Note that this array stores individuals as an np.array of floats as opposed to Individual-Dicts
# This is because this array is used within the context of the gradient descent algorithm and
@@ -185,7 +185,7 @@ def post_process(self, traj, fitnesses_results):
# We need to convert the current run index into an ind_idx
# (index of individual within one generation
traj.v_idx = run_index
- ind_index = traj.par.ind_idx
+ ind_index = traj.parameters.ind_idx

individual = old_eval_pop[ind_index]

2 changes: 1 addition & 1 deletion l2l/optimizers/naturalevolutionstrategies/optimizer.py
@@ -210,7 +210,7 @@ def post_process(self, traj, fitnesses_results):
# We need to convert the current run index into an ind_idx
# (index of individual within one generation)
traj.v_idx = run_index
- ind_index = traj.par.ind_idx
+ ind_index = traj.parameters.ind_idx

traj.f_add_result('$set.$.individual', self.eval_pop[ind_index])
traj.f_add_result('$set.$.fitness', fitness)
2 changes: 1 addition & 1 deletion l2l/optimizers/paralleltempering/optimizer.py
@@ -307,7 +307,7 @@ def post_process(self, traj, fitnesses_results):
# We need to convert the current run index into an ind_idx
# (index of individual within one generation)
traj.v_idx = run_index
- ind_index = traj.par.ind_idx
+ ind_index = traj.parameters.ind_idx
individual = old_eval_pop[ind_index]

# Accept or reject the new solution
2 changes: 1 addition & 1 deletion l2l/optimizers/simulatedannealing/optimizer.py
@@ -225,7 +225,7 @@ def post_process(self, traj, fitnesses_results):
# We need to convert the current run index into an ind_idx
# (index of individual within one generation)
traj.v_idx = run_index
- ind_index = traj.par.ind_idx
+ ind_index = traj.parameters.ind_idx
individual = old_eval_pop[ind_index]

# Accept or reject the new solution
53 changes: 25 additions & 28 deletions l2l/tests/test_all.py
@@ -1,30 +1,27 @@
import unittest


-
- from . import test_ce_optimizer
- from . import test_ga_optimizer
- from . import test_sa_optimizer
- from . import test_gd_optimizer
- from . import test_innerloop
- from . import test_outerloop
- from . import test_setup
-
-
- def suite():
-
-     suite = unittest.TestSuite()
-     suite.addTest(test_setup.suite())
-     suite.addTest(test_outerloop.suite())
-     suite.addTest(test_innerloop.suite())
-     suite.addTest(test_ce_optimizer.suite())
-     suite.addTest(test_sa_optimizer.suite())
-     suite.addTest(test_gd_optimizer.suite())
-     suite.addTest(test_ga_optimizer.suite())
-
-     return suite
-
-
- if __name__ == "__main__":
-
-     runner = unittest.TextTestRunner(verbosity=2)
-     runner.run(suite())
+ import l2l.tests.test_ga_optimizer as test_ga_optimizer
+ import l2l.tests.test_sa_optimizer as test_sa_optimizer
+ import l2l.tests.test_gd_optimizer as test_gd_optimizer
+ import l2l.tests.test_gs_optimizer as test_gs_optimizer
+ import l2l.tests.test_pt_optimizer as test_pt_optimizer
+ import l2l.tests.test_face_optimizer as test_face_optimizer
+ import l2l.tests.test_es_optimizer as test_es_optimizer
+ import l2l.tests.test_setup as test_setup
+
+
+ suite = unittest.TestSuite()
+ loader = unittest.TestLoader()
+
+ suite.addTests(test_setup.suite())
+ suite.addTests(test_es_optimizer.suite())
+ suite.addTests(test_sa_optimizer.suite())
+ suite.addTests(test_gd_optimizer.suite())
+ suite.addTests(test_ga_optimizer.suite())
+ suite.addTests(test_gs_optimizer.suite())
+ suite.addTests(test_face_optimizer.suite())
+ suite.addTests(test_pt_optimizer.suite())
+ suite.addTests(test_es_optimizer.suite())
+
+ runner = unittest.TextTestRunner(verbosity=3)
+ result = runner.run(suite)
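
For orientation, the new test_all.py builds one combined suite by calling each module's suite() factory and runs it at import time. A small, self-contained sketch of that aggregation pattern with a subset of the modules imported above (assuming each exposes a suite() factory, as the calls in the diff imply):

import unittest

import l2l.tests.test_setup as test_setup
import l2l.tests.test_sa_optimizer as test_sa_optimizer
import l2l.tests.test_gd_optimizer as test_gd_optimizer


def combined_suite():
    # Each test module exposes a suite() factory; addTests() flattens them
    # into a single TestSuite that one runner can execute.
    suite = unittest.TestSuite()
    for module in (test_setup, test_sa_optimizer, test_gd_optimizer):
        suite.addTests(module.suite())
    return suite


if __name__ == "__main__":
    unittest.TextTestRunner(verbosity=2).run(combined_suite())
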
5 changes: 3 additions & 2 deletions l2l/tests/test_ce_optimizer.py
@@ -1,11 +1,12 @@
import unittest

import numpy as np
- from l2l.tests.test_optimizer import OptimizerTestCase
+ from l2l.tests.test_optimizer import TestCaseOptimizer
from l2l.optimizers.crossentropy.distribution import NoisyGaussian
from l2l.optimizers.crossentropy import CrossEntropyOptimizer, CrossEntropyParameters

- class CEOptimizerTestCase(OptimizerTestCase):
+
+ class CEOptimizerTestCase(TestCaseOptimizer):
def test_setup(self):

7 changes: 4 additions & 3 deletions l2l/tests/test_es_optimizer.py
@@ -1,10 +1,11 @@
import unittest

import numpy as np
- from l2l.tests.test_optimizer import OptimizerTestCase
+ from l2l.tests.test_optimizer import TestCaseOptimizer
from l2l.optimizers.evolutionstrategies import EvolutionStrategiesParameters, EvolutionStrategiesOptimizer

- class ESOptimizerTestCase(OptimizerTestCase):
+
+ class TestCaseESOptimizer(TestCaseOptimizer):

def test_setup(self):

@@ -41,7 +42,7 @@ def test_setup(self):


def suite():
- suite = unittest.makeSuite(CEOptimizerTestCase, 'test')
+ suite = unittest.makeSuite(TestCaseESOptimizer, 'test')
return suite


4 changes: 2 additions & 2 deletions l2l/tests/test_face_optimizer.py
@@ -1,12 +1,12 @@
import unittest

import numpy as np
- from l2l.tests.test_optimizer import OptimizerTestCase
+ from l2l.tests.test_optimizer import TestCaseOptimizer
from l2l.optimizers.crossentropy.distribution import Gaussian
from l2l.optimizers.face import FACEOptimizer, FACEParameters


- class FACEOptimizerTestCase(OptimizerTestCase):
+ class FACEOptimizerTestCase(TestCaseOptimizer):

def test_setup(self):

4 changes: 2 additions & 2 deletions l2l/tests/test_ga_optimizer.py
@@ -1,10 +1,10 @@
import unittest

- from l2l.tests.test_optimizer import OptimizerTestCase
+ from l2l.tests.test_optimizer import TestCaseOptimizer
from l2l.optimizers.evolution import GeneticAlgorithmOptimizer, GeneticAlgorithmParameters


- class GAOptimizerTestCase(OptimizerTestCase):
+ class GAOptimizerTestCase(TestCaseOptimizer):

def test_setup(self):

4 changes: 2 additions & 2 deletions l2l/tests/test_gd_optimizer.py
@@ -3,12 +3,12 @@
import numpy as np
from l2l.optimizers.gradientdescent.optimizer import GradientDescentOptimizer
from l2l.optimizers.gradientdescent.optimizer import RMSPropParameters
- from l2l.tests.test_optimizer import OptimizerTestCase
+ from l2l.tests.test_optimizer import TestCaseOptimizer

from l2l import list_to_dict


- class GDOptimizerTestCase(OptimizerTestCase):
+ class GDOptimizerTestCase(TestCaseOptimizer):

def test_gd(self):
optimizer_parameters = RMSPropParameters(learning_rate=0.01, exploration_step_size=0.01,
4 changes: 2 additions & 2 deletions l2l/tests/test_gs_optimizer.py
@@ -1,13 +1,13 @@
import unittest

- from l2l.tests.test_optimizer import OptimizerTestCase
+ from l2l.tests.test_optimizer import TestCaseOptimizer

from l2l.optimizers.gridsearch import GridSearchOptimizer, GridSearchParameters

from l2l import list_to_dict


- class GSOptimizerTestCase(OptimizerTestCase):
+ class GSOptimizerTestCase(TestCaseOptimizer):

def test_gd(self):
n_grid_divs_per_axis = 2
15 changes: 14 additions & 1 deletion l2l/tests/test_optimizer.py
@@ -7,7 +7,7 @@
from collections import namedtuple


- class OptimizerTestCase(unittest.TestCase):
+ class TestCaseOptimizer(unittest.TestCase):

def setUp(self):
# Test function
@@ -23,3 +23,16 @@ def setUp(self):
jube_parameter=jube_params)
self.optimizee_parameters = namedtuple('OptimizeeParameters', [])
self.optimizee = FunctionGeneratorOptimizee(self.trajectory,benchmark_function, seed=1)
+
+ def suite():
+     suite = unittest.makeSuite(TestCaseOptimizer, 'test')
+     return suite
+
+
+ def run():
+     runner = unittest.TextTestRunner(verbosity=2)
+     runner.run(suite())
+
+
+ if __name__ == "__main__":
+     run()
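
The concrete test cases above all inherit from this renamed base class. A hedged sketch of that usage, relying only on the attributes TestCaseOptimizer.setUp() is shown to create (self.trajectory, self.optimizee, self.optimizee_parameters); the class name and assertions are illustrative, not taken from the repository:

import unittest

from l2l.tests.test_optimizer import TestCaseOptimizer


class ExampleOptimizerTestCase(TestCaseOptimizer):
    """Illustrative subclass; the real tests wire up a specific optimizer here."""

    def test_setup(self):
        # The shared fixture from TestCaseOptimizer.setUp() provides these attributes.
        self.assertIsNotNone(self.trajectory)
        self.assertIsNotNone(self.optimizee)


def suite():
    return unittest.makeSuite(ExampleOptimizerTestCase, 'test')


if __name__ == "__main__":
    unittest.TextTestRunner(verbosity=2).run(suite())
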
4 changes: 2 additions & 2 deletions l2l/tests/test_pt_optimizer.py
@@ -1,12 +1,12 @@
import unittest

- from l2l.tests.test_optimizer import OptimizerTestCase
+ from l2l.tests.test_optimizer import TestCaseOptimizer
import numpy as np
from l2l.optimizers.paralleltempering.optimizer import AvailableCoolingSchedules
from l2l.optimizers.paralleltempering.optimizer import ParallelTemperingParameters, ParallelTemperingOptimizer


- class PTOptimizerTestCase(OptimizerTestCase):
+ class PTOptimizerTestCase(TestCaseOptimizer):

def test_sa(self):
cooling_schedules = [AvailableCoolingSchedules.EXPONENTIAL_ADDAPTIVE,
4 changes: 2 additions & 2 deletions l2l/tests/test_sa_optimizer.py
@@ -1,11 +1,11 @@
import unittest

- from l2l.tests.test_optimizer import OptimizerTestCase
+ from l2l.tests.test_optimizer import TestCaseOptimizer
import numpy as np
from l2l.optimizers.simulatedannealing.optimizer import SimulatedAnnealingParameters, SimulatedAnnealingOptimizer, AvailableCoolingSchedules


- class SAOptimizerTestCase(OptimizerTestCase):
+ class SAOptimizerTestCase(TestCaseOptimizer):

def test_sa(self):
optimizer_parameters = SimulatedAnnealingParameters(n_parallel_runs=1, noisy_step=.03, temp_decay=.99, n_iteration=1,
11 changes: 3 additions & 8 deletions l2l/tests/test_setup.py
@@ -11,16 +11,11 @@
import os


- class SetupTestCase(unittest.TestCase):
+ class TestCaseSetup(unittest.TestCase):

    def setUp(self):
        self.name = "test_trajectory"
-         try:
-             with open('../../bin/path.conf') as f:
-                 root_dir_path = f.read().strip()
-         except FileNotFoundError:
-             self.fail("L2L is not well configured. Missing path file.")
-         self.paths = Paths(self.name, dict(run_num='test'), root_dir_path=root_dir_path, suffix="-" + self.name)
+         self.paths = Paths(self.name, dict(run_num='test'), root_dir_path=".", suffix="-" + self.name)

def test_paths(self):
self.assertIsNotNone(self.paths)
@@ -105,7 +100,7 @@ def test_juberunner_setup(self):


def suite():
- suite = unittest.makeSuite(SetupTestCase, 'test')
+ suite = unittest.makeSuite(TestCaseSetup, 'test')
return suite


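
The fixture no longer reads bin/path.conf; Paths is rooted directly in the working directory. A hedged sketch of the simplified construction (the Paths import location is assumed from the repository layout and is not shown in this hunk):

# Assumed import location; the hunk above only shows the constructor call.
from l2l.paths import Paths

name = "test_trajectory"
# Root the output paths in the current working directory instead of a
# path read from ../../bin/path.conf.
paths = Paths(name, dict(run_num='test'), root_dir_path=".", suffix="-" + name)
assert paths is not None  # mirrors the assertIsNotNone check in test_paths()
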
2 changes: 1 addition & 1 deletion l2l/utils/JUBE_runner.py
@@ -24,7 +24,7 @@ def __init__(self, trajectory):
"""
self.trajectory = trajectory
self.done = False
- if 'JUBE_params' not in self.trajectory.par.keys():
+ if 'JUBE_params' not in self.trajectory.parameters.keys():
raise KeyError("The trajectory must contain the parameter group JUBE_params")
args = self.trajectory.parameters["JUBE_params"].params

6 changes: 3 additions & 3 deletions l2l/utils/environment.py
@@ -38,10 +38,10 @@ def run(self, runfunc):
:return: the results of running a whole generation. Dictionary indexed by generation id.
"""
result = {}
- for it in range(self.trajectory.par['n_iteration']):
+ for it in range(self.trajectory.parameters['n_iteration']):
if self.multiprocessing:
# Multiprocessing is done through JUBE, either with or without scheduler
- logging.info("Environment run starting JUBERunner for n iterations: " + str(self.trajectory.par['n_iteration']))
+ logging.info("Environment run starting JUBERunner for n iterations: " + str(self.trajectory.parameters['n_iteration']))
jube = JUBERunner(self.trajectory)
result[it] = []
# Initialize new JUBE run and execute it
@@ -67,7 +67,7 @@ def run(self, runfunc):
logger.exception("Error during serial execution of individuals")
raise
# Add results to the trajectory
- self.trajectory.results.f_add_result_to_group("all_results", it, result[it])
+ self.trajectory.results.f_add_result(it, result[it])
self.trajectory.current_results = result[it]
# Perform the postprocessing step in order to generate the new parameter set
self.postprocessing(self.trajectory, result[it])
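
To make the control flow of run() easier to follow, here is a small, self-contained toy of the generation loop it implements: evaluate one generation, store its results under the iteration index, then hand them to the postprocessing hook that prepares the next generation. JUBE dispatch, error handling, and the trajectory object itself are deliberately left out; names here are illustrative, not the library's.

# Toy version of the Environment.run() loop shape; not the library implementation.
def run_generations(n_iteration, evaluate_generation, postprocess):
    results = {}
    for it in range(n_iteration):                 # cf. trajectory.parameters['n_iteration']
        results[it] = evaluate_generation(it)    # one generation of (index, fitness) pairs
        postprocess(it, results[it])              # cf. self.postprocessing(trajectory, result[it])
    return results


if __name__ == "__main__":
    all_results = run_generations(
        n_iteration=3,
        evaluate_generation=lambda it: [(idx, float(idx + it)) for idx in range(4)],
        postprocess=lambda it, res: print(f"generation {it}: {res}"),
    )
    print(all_results)
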