Skip to content

Commit 54684c7

Browse files
authored
Merge pull request #254 from toshihikoyanase/update-black-target-version
Update target version of black from Python 3.8 to 3.9.
2 parents 33a9f1d + f27bbb9 commit 54684c7

File tree

4 files changed

+31
-23
lines changed

4 files changed

+31
-23
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -169,7 +169,7 @@ version = {attr = "optuna_integration.version.__version__"}
169169

170170
[tool.black]
171171
line-length = 99
172-
target-version = ['py38']
172+
target-version = ['py39']
173173
exclude = '''
174174
/(
175175
\.eggs

tests/botorch/test_botorch.py

Lines changed: 6 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -476,11 +476,12 @@ def test_botorch_n_startup_trials() -> None:
476476
sampler = BoTorchSampler(n_startup_trials=2, independent_sampler=independent_sampler)
477477
study = optuna.create_study(directions=["minimize", "maximize"], sampler=sampler)
478478

479-
with patch.object(
480-
independent_sampler, "sample_independent", wraps=independent_sampler.sample_independent
481-
) as mock_independent, patch.object(
482-
sampler, "sample_relative", wraps=sampler.sample_relative
483-
) as mock_relative:
479+
with (
480+
patch.object(
481+
independent_sampler, "sample_independent", wraps=independent_sampler.sample_independent
482+
) as mock_independent,
483+
patch.object(sampler, "sample_relative", wraps=sampler.sample_relative) as mock_relative,
484+
):
484485
study.optimize(
485486
lambda t: [t.suggest_float("x0", 0, 1), t.suggest_float("x1", 0, 1)], n_trials=3
486487
)

tests/cma/test_cma.py

Lines changed: 10 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -122,13 +122,16 @@ def test_sample_relative_n_startup_trials() -> None:
122122

123123
# The independent sampler is used for Trial#0 and Trial#1.
124124
# The CMA-ES is used for Trial#2.
125-
with patch.object(
126-
independent_sampler,
127-
"sample_independent",
128-
wraps=independent_sampler.sample_independent,
129-
) as mock_independent, patch.object(
130-
sampler, "sample_relative", wraps=sampler.sample_relative
131-
) as mock_relative:
125+
with (
126+
patch.object(
127+
independent_sampler,
128+
"sample_independent",
129+
wraps=independent_sampler.sample_independent,
130+
) as mock_independent,
131+
patch.object(
132+
sampler, "sample_relative", wraps=sampler.sample_relative
133+
) as mock_relative,
134+
):
132135
study.optimize(
133136
lambda t: t.suggest_int("x", -1, 1) + t.suggest_int("y", -1, 1),
134137
n_trials=3,

tests/lightgbm/test_optimize.py

Lines changed: 14 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -512,17 +512,19 @@ def test_when_a_step_does_not_improve_best_score(self) -> None:
512512
tuner = LightGBMTuner(params, dataset, valid_sets=valid_sets)
513513
assert not tuner.higher_is_better()
514514

515-
with mock.patch("lightgbm.train"), mock.patch.object(
516-
_BaseTuner, "_get_booster_best_score", return_value=0.9
515+
with (
516+
mock.patch("lightgbm.train"),
517+
mock.patch.object(_BaseTuner, "_get_booster_best_score", return_value=0.9),
517518
):
518519
tuner.tune_feature_fraction()
519520

520521
assert "feature_fraction" in tuner.best_params
521522
assert tuner.best_score == 0.9
522523

523524
# Assume that tuning `num_leaves` doesn't improve the `best_score`.
524-
with mock.patch("lightgbm.train"), mock.patch.object(
525-
_BaseTuner, "_get_booster_best_score", return_value=1.1
525+
with (
526+
mock.patch("lightgbm.train"),
527+
mock.patch.object(_BaseTuner, "_get_booster_best_score", return_value=1.1),
526528
):
527529
tuner.tune_num_leaves()
528530

@@ -562,9 +564,10 @@ def test_run_show_progress_bar(self, show_progress_bar: bool, expected: int) ->
562564
show_progress_bar=show_progress_bar,
563565
)
564566

565-
with mock.patch.object(
566-
_BaseTuner, "_get_booster_best_score", return_value=1.0
567-
), mock.patch("tqdm.tqdm") as mock_tqdm:
567+
with (
568+
mock.patch.object(_BaseTuner, "_get_booster_best_score", return_value=1.0),
569+
mock.patch("tqdm.tqdm") as mock_tqdm,
570+
):
568571
tuner.run()
569572

570573
assert mock_tqdm.call_count == expected
@@ -918,9 +921,10 @@ def test_run_show_progress_bar(self, show_progress_bar: bool, expected: int) ->
918921
params, dataset, study=study, time_budget=1, show_progress_bar=show_progress_bar
919922
)
920923

921-
with mock.patch.object(
922-
_OptunaObjectiveCV, "_get_cv_scores", return_value=[1.0]
923-
), mock.patch("tqdm.tqdm") as mock_tqdm:
924+
with (
925+
mock.patch.object(_OptunaObjectiveCV, "_get_cv_scores", return_value=[1.0]),
926+
mock.patch("tqdm.tqdm") as mock_tqdm,
927+
):
924928
tuner.run()
925929

926930
assert mock_tqdm.call_count == expected

0 commit comments

Comments (0)