From 6376560024fdc4939c650e0450f313fb64cd278b Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 16 Sep 2024 20:22:00 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 example.ipynb                              | 52 +++++++++++++---------
 tests/conftest.py                          | 14 +++---
 tests/test_client_support.py               |  4 +-
 tests/test_database_manager.py             | 12 ++---
 tests/test_job_manager.py                  | 16 +++----
 tests/test_kill_manager.py                 | 10 ++---
 tests/test_log_generation.py               |  4 +-
 tests/test_run_manager.py                  | 10 ++---
 tests/test_server_support_common.py        |  2 +-
 tests/test_slurm_run.py                    |  4 +-
 tests/test_utils_file_creation_progress.py |  2 +-
 11 files changed, 70 insertions(+), 60 deletions(-)

diff --git a/example.ipynb b/example.ipynb
index c0003019..c658d30d 100644
--- a/example.ipynb
+++ b/example.ipynb
@@ -23,13 +23,15 @@
    "outputs": [],
    "source": [
     "import numpy as np\n",
+    "\n",
     "import adaptive_scheduler\n",
-    "import random\n",
+    "\n",
     "\n",
     "def h(x, width=0.01, offset=0):\n",
     "    for _ in range(10):  # Burn some CPU time just because\n",
     "        np.linalg.eig(np.random.rand(1000, 1000))\n",
-    "    return x + width ** 2 / (width ** 2 + (x - offset) ** 2)\n",
+    "    return x + width**2 / (width**2 + (x - offset) ** 2)\n",
+    "\n",
     "\n",
     "# Define the sequence/samples we want to run\n",
     "xs = np.linspace(0, 1, 10_000)\n",
@@ -37,7 +39,9 @@
     "# ⚠️ Here a `learner` is an `adaptive` concept, read it as `jobs`.\n",
     "# ⚠️ `fnames` are the result locations\n",
     "learners, fnames = adaptive_scheduler.utils.split_sequence_in_sequence_learners(\n",
-    "    h, xs, n_learners=10\n",
+    "    h,\n",
+    "    xs,\n",
+    "    n_learners=10,\n",
     ")\n",
     "\n",
     "run_manager = adaptive_scheduler.slurm_run(\n",
@@ -48,7 +52,7 @@
     "    nodes=1,  # number of nodes per `learner`\n",
     "    cores_per_node=1,  # number of cores on 1 node per `learner`\n",
     "    log_interval=5,  # how often to produce a log message\n",
-    "    save_interval=5, # how often to save the results\n",
+    "    save_interval=5,  # how often to save the results\n",
     ")\n",
     "run_manager.start()"
    ]
@@ -85,18 +89,18 @@
     "from functools import partial\n",
     "\n",
     "import adaptive\n",
+    "\n",
     "import adaptive_scheduler\n",
     "\n",
     "\n",
     "def h(x, width=0.01, offset=0):\n",
     "    import numpy as np\n",
-    "    import random\n",
     "\n",
     "    for _ in range(10):  # Burn some CPU time just because\n",
     "        np.linalg.eig(np.random.rand(1000, 1000))\n",
     "\n",
     "    a = width\n",
-    "    return x + a ** 2 / (a ** 2 + (x - offset) ** 2)\n",
+    "    return x + a**2 / (a**2 + (x - offset) ** 2)\n",
     "\n",
     "\n",
     "offsets = [i / 10 - 0.5 for i in range(5)]\n",
@@ -266,16 +270,16 @@
    "outputs": [],
    "source": [
     "import numpy as np\n",
-    "\n",
     "from adaptive import SequenceLearner\n",
-    "from adaptive_scheduler.utils import split, combo_to_fname\n",
+    "\n",
+    "from adaptive_scheduler.utils import split\n",
     "\n",
     "\n",
     "def g(xyz):\n",
     "    x, y, z = xyz\n",
     "    for _ in range(5):  # Burn some CPU time just because\n",
     "        np.linalg.eig(np.random.rand(1000, 1000))\n",
-    "    return x ** 2 + y ** 2 + z ** 2\n",
+    "    return x**2 + y**2 + z**2\n",
     "\n",
     "\n",
     "xs = np.linspace(0, 10, 11)\n",
@@ -302,11 +306,17 @@
     "\n",
     "\n",
     "scheduler = adaptive_scheduler.scheduler.DefaultScheduler(\n",
-    "    cores=10, executor_type=\"ipyparallel\",\n",
+    "    cores=10,\n",
+    "    executor_type=\"ipyparallel\",\n",
     ")  # PBS or SLURM\n",
     "\n",
     "run_manager2 = adaptive_scheduler.server_support.RunManager(\n",
-    "    scheduler, learners, fnames, goal=goal, log_interval=30, save_interval=30,\n",
+    "    scheduler,\n",
+    "    learners,\n",
+    "    fnames,\n",
+    "    goal=goal,\n",
+    "    log_interval=30,\n",
+    "    save_interval=30,\n",
     ")\n",
     "run_manager2.start()"
    ]
@@ -343,11 +353,11 @@
    "outputs": [],
    "source": [
     "import numpy as np\n",
-    "\n",
     "from adaptive import SequenceLearner\n",
-    "from adaptive_scheduler.utils import split, combo2fname\n",
     "from adaptive.utils import named_product\n",
     "\n",
+    "from adaptive_scheduler.utils import combo2fname\n",
+    "\n",
     "\n",
     "def g(combo):\n",
     "    x, y, z = combo[\"x\"], combo[\"y\"], combo[\"z\"]\n",
@@ -355,7 +365,7 @@
     "\n",
     "    for _ in range(5):  # Burn some CPU time just because\n",
     "        np.linalg.eig(np.random.rand(1000, 1000))\n",
     "\n",
-    "    return x ** 2 + y ** 2 + z ** 2\n",
+    "    return x**2 + y**2 + z**2\n",
     "\n",
     "\n",
@@ -364,7 +374,7 @@
     "combos = named_product(x=np.linspace(0, 10), y=np.linspace(-1, 1), z=np.linspace(-3, 3))\n",
     "\n",
     "print(f\"Length of combos: {len(combos)}.\")\n",
     "\n",
     "# We could run this as 1 job with N nodes, but we can also split it up in multiple jobs.\n",
     "# This is desireable when you don't want to run a single job with 300 nodes for example.\n",
-    "# Note that \n",
+    "# Note that\n",
     "# `adaptive_scheduler.utils.split_sequence_in_sequence_learners(g, combos, 100, \"data\")`\n",
     "# does the same!\n",
@@ -372,7 +382,7 @@
     "\n",
     "njobs = 100\n",
     "split_combos = list(split(combos, njobs))\n",
     "\n",
     "print(\n",
-    "    f\"Length of split_combos: {len(split_combos)} and length of split_combos[0]: {len(split_combos[0])}.\"\n",
+    "    f\"Length of split_combos: {len(split_combos)} and length of split_combos[0]: {len(split_combos[0])}.\",\n",
     ")\n",
     "\n",
     "learners = [SequenceLearner(g, combos_part) for combos_part in split_combos]\n",
@@ -393,17 +403,16 @@
    "outputs": [],
    "source": [
     "from functools import partial\n",
+    "\n",
     "import adaptive_scheduler\n",
-    "from adaptive_scheduler.scheduler import DefaultScheduler, PBS, SLURM\n",
+    "from adaptive_scheduler.scheduler import SLURM, DefaultScheduler\n",
     "\n",
     "\n",
     "def goal(learner):\n",
     "    return learner.done()  # the standard goal for a SequenceLearner\n",
     "\n",
     "\n",
-    "extra_scheduler = (\n",
-    "    [\"--exclusive\", \"--time=24:00:00\"] if DefaultScheduler is SLURM else []\n",
-    ")\n",
+    "extra_scheduler = [\"--exclusive\", \"--time=24:00:00\"] if DefaultScheduler is SLURM else []\n",
     "\n",
     "scheduler = adaptive_scheduler.scheduler.DefaultScheduler(\n",
     "    cores=10,\n",
@@ -459,7 +468,8 @@
    "source": [
     "run_manager3.load_learners()  # load the data into the learners\n",
     "result = sum(\n",
-    "    [l.result() for l in learners], []\n",
+    "    [l.result() for l in learners],\n",
+    "    [],\n",
     ")  # combine all learner's result into 1 list"
    ]
   }
diff --git a/tests/conftest.py b/tests/conftest.py
index e7b26783..e476c2bf 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -23,13 +23,13 @@
 import zmq.asyncio


-@pytest.fixture()
+@pytest.fixture
 def mock_scheduler(tmp_path: Path) -> MockScheduler:
     """Fixture for creating a MockScheduler instance."""
     return MockScheduler(log_folder=str(tmp_path), cores=8)


-@pytest.fixture()
+@pytest.fixture
 def db_manager(
     mock_scheduler: MockScheduler,
     learners: list[adaptive.Learner1D]
@@ -99,14 +99,14 @@ def fnames(
     raise NotImplementedError(msg)


-@pytest.fixture()
+@pytest.fixture
 def socket(db_manager: DatabaseManager) -> zmq.asyncio.Socket:
     """Fixture for creating a ZMQ socket."""
     with get_socket(db_manager) as socket:
         yield socket


-@pytest.fixture()
+@pytest.fixture
 def job_manager(
     db_manager: DatabaseManager,
     mock_scheduler: MockScheduler,
@@ -116,7 +116,7 @@
 ) -> JobManager:
     """Fixture for creating a JobManager instance."""
     return JobManager(job_names, db_manager, mock_scheduler, interval=0.05)


-@pytest.fixture()
+@pytest.fixture
 def _mock_slurm_partitions_output() -> Generator[None, None, None]:
     """Mock `slurm_partitions` function."""
     mock_output = "hb120v2-low\nhb60-high\nnc24-low*\nnd40v2-mpi\n"
@@ -125,7 +125,7 @@
     yield


-@pytest.fixture()
+@pytest.fixture
 def _mock_slurm_partitions() -> Generator[None, None, None]:
     """Mock `slurm_partitions` function."""
     with (
@@ -141,7 +141,7 @@
     yield


-@pytest.fixture()
+@pytest.fixture
 def _mock_slurm_queue() -> Generator[None, None, None]:
     """Mock `SLURM.queue` function."""
     with patch(
diff --git a/tests/test_client_support.py b/tests/test_client_support.py
index 434a130b..7d652b3c 100644
--- a/tests/test_client_support.py
+++ b/tests/test_client_support.py
@@ -27,7 +27,7 @@ def client(zmq_url: str) -> zmq.Socket:
     return client


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_get_learner(zmq_url: str) -> None:
     """Test `get_learner` function."""
     with tempfile.NamedTemporaryFile() as tmpfile:
@@ -94,7 +94,7 @@ async def test_get_learner(zmq_url: str) -> None:
         mock_log.exception.assert_called_with("got an exception")


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_tell_done(zmq_url: str) -> None:
     """Test `tell_done` function."""
     fname = "test_learner_file.pkl"
diff --git a/tests/test_database_manager.py b/tests/test_database_manager.py
index 26396969..b958b311 100644
--- a/tests/test_database_manager.py
+++ b/tests/test_database_manager.py
@@ -102,7 +102,7 @@ def test_simple_database_get_all(tmp_path: Path) -> None:
     assert done_entries[1][1].fname == "file3.txt"


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_database_manager_start_and_cancel(db_manager: DatabaseManager) -> None:
     """Test starting and canceling the DatabaseManager."""
     db_manager.start()
@@ -172,7 +172,7 @@ def test_database_manager_as_dicts(
     ]


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_database_manager_dispatch_start_stop(
     db_manager: DatabaseManager,
     learners: list[adaptive.Learner1D]
@@ -205,7 +205,7 @@
     assert entry.is_done is True


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_database_manager_start_and_update(
     socket: zmq.asyncio.Socket,
     db_manager: DatabaseManager,
@@ -259,7 +259,7 @@
     assert entry.job_id is None


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_database_manager_start_stop(
     socket: zmq.asyncio.Socket,
     db_manager: DatabaseManager,
@@ -322,7 +322,7 @@
         await send_message(socket, start_message)


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_database_manager_stop_request_and_requests(
     socket: zmq.asyncio.Socket,
     db_manager: DatabaseManager,
@@ -531,7 +531,7 @@ def test_ensure_str_invalid_input(invalid_input: list[str]) -> None:
         _ensure_str(invalid_input)  # type: ignore[arg-type]


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_dependencies(
     db_manager: DatabaseManager,
     fnames: list[str] | list[Path],
diff --git a/tests/test_job_manager.py b/tests/test_job_manager.py
index b0022bba..97c9a75c 100644
--- a/tests/test_job_manager.py
+++ b/tests/test_job_manager.py
@@ -12,7 +12,7 @@
 from adaptive_scheduler.server_support import JobManager, MaxRestartsReachedError


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_job_manager_init(job_manager: JobManager) -> None:
     """Test the initialization of JobManager."""
     job_manager.database_manager.start()
@@ -20,7 +20,7 @@ async def test_job_manager_init(job_manager: JobManager) -> None:
     assert job_manager.task is not None


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_job_manager_queued(job_manager: JobManager) -> None:
     """Test the _queued method of JobManager."""
     job_manager.scheduler.start_job("job1")
@@ -30,7 +30,7 @@ async def test_job_manager_queued(job_manager: JobManager) -> None:
     assert job_manager._queued(job_manager.scheduler.queue()) == {"job1", "job2"}


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_job_manager_manage_max_restarts_reached(job_manager: JobManager) -> None:
     """Test the JobManager when the maximum restarts are reached."""
     job_manager.n_started = 105
@@ -48,7 +48,7 @@ async def test_job_manager_manage_max_restarts_reached(job_manager: JobManager)
         job_manager.task.result()


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_job_manager_manage_start_jobs(job_manager: JobManager) -> None:
     """Test the JobManager when managing the start of jobs."""
     job_manager.database_manager.n_done = MagicMock(return_value=0)  # type: ignore[method-assign]
@@ -60,7 +60,7 @@ async def test_job_manager_manage_start_jobs(job_manager: JobManager) -> None:
     assert set(job_manager.scheduler._started_jobs) == {"job1", "job2"}  # type: ignore[attr-defined]


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_job_manager_manage_start_max_simultaneous_jobs(
     job_manager: JobManager,
 ) -> None:
@@ -76,7 +76,7 @@ async def test_job_manager_manage_start_max_simultaneous_jobs(
     assert len(job_manager.scheduler._started_jobs) == 1  # type: ignore[attr-defined]


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_job_manager_manage_cancelled_error(
     job_manager: JobManager,
     caplog: pytest.LogCaptureFixture,
@@ -100,7 +100,7 @@ async def test_job_manager_manage_cancelled_error(
     assert "task was cancelled because of a CancelledError" in caplog.text


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_job_manager_manage_n_done_equal_job_names(
     job_manager: JobManager,
 ) -> None:
@@ -116,7 +116,7 @@ async def test_job_manager_manage_n_done_equal_job_names(
     assert job_manager.task.result() is None


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_job_manager_manage_generic_exception(
     job_manager: JobManager,
     caplog: pytest.LogCaptureFixture,
diff --git a/tests/test_kill_manager.py b/tests/test_kill_manager.py
index 6c488ed6..69f71320 100644
--- a/tests/test_kill_manager.py
+++ b/tests/test_kill_manager.py
@@ -22,7 +22,7 @@
 INTERVAL = 0.05


-@pytest.fixture()
+@pytest.fixture
 def kill_manager(db_manager: DatabaseManager) -> KillManager:
     """Fixture for creating a KillManager instance."""
     return KillManager(
@@ -35,7 +35,7 @@ def kill_manager(db_manager: DatabaseManager) -> KillManager:
     )


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_kill_manager_init(kill_manager: KillManager) -> None:
     """Test KillManager initialization."""
     assert kill_manager.scheduler is not None
@@ -139,7 +139,7 @@ def test_logs_with_string_or_condition_missing_file() -> None:
     assert len(result) == 0


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_kill_manager_manage(kill_manager: KillManager) -> None:
     """Test KillManager.manage method."""
     # The KillManager will read from the .out files, which are determined
@@ -161,7 +161,7 @@ async def test_kill_manager_manage(kill_manager: KillManager) -> None:
     assert str(output_file_path) in kill_manager.deleted


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_kill_manager_manage_exception(
     kill_manager: KillManager,
     caplog: pytest.LogCaptureFixture,
@@ -187,7 +187,7 @@ async def test_kill_manager_manage_exception(
     assert "TypeError" in caplog.text


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_kill_manager_manage_canceled(
     kill_manager: KillManager,
     caplog: pytest.LogCaptureFixture,
diff --git a/tests/test_log_generation.py b/tests/test_log_generation.py
index 26d96add..16b38928 100644
--- a/tests/test_log_generation.py
+++ b/tests/test_log_generation.py
@@ -33,7 +33,7 @@ def expected_log_keys(learner: adaptive.BaseLearner) -> list[str]:
     return expected_keys


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_get_log_entry(
     learners: list[adaptive.Learner1D] | list[adaptive.BalancingLearner]
@@ -58,7 +58,7 @@
     assert all(key in result for key in expected_keys)


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_log_info(
     learners: list[adaptive.Learner1D] | list[adaptive.BalancingLearner]
diff --git a/tests/test_run_manager.py b/tests/test_run_manager.py
index 7a799a2b..02943ae7 100644
--- a/tests/test_run_manager.py
+++ b/tests/test_run_manager.py
@@ -38,7 +38,7 @@ def test_run_manager_init(
     assert isinstance(rm, RunManager)


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_run_manager_start_and_cancel(
     mock_scheduler: MockScheduler,
     learners: list[adaptive.Learner1D]
@@ -103,7 +103,7 @@ def test_run_manager_load_learners(
         assert learner.data


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_run_manager_elapsed_time(
     mock_scheduler: MockScheduler,
     learners: list[adaptive.Learner1D]
@@ -122,7 +122,7 @@ async def test_run_manager_elapsed_time(
     assert rm.elapsed_time() > 0


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_run_manager_status(
     mock_scheduler: MockScheduler,
     learners: list[adaptive.Learner1D]
@@ -191,7 +191,7 @@ def test_run_manager_load_dataframes(
         rm.load_dataframes()


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_start_one_by_one(
     mock_scheduler: MockScheduler,
     learners: list[adaptive.Learner1D]
@@ -223,7 +223,7 @@ def goal(rm: RunManager) -> bool:
     rm2.cancel()


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_run_manager_auto_restart(
     mock_scheduler: MockScheduler,
     learners: list[adaptive.Learner1D]
diff --git a/tests/test_server_support_common.py b/tests/test_server_support_common.py
index 47021fe0..933a7b37 100644
--- a/tests/test_server_support_common.py
+++ b/tests/test_server_support_common.py
@@ -18,7 +18,7 @@
 from .helpers import MockScheduler, temporary_working_directory


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_periodically_clean_ipython_profiles(
     mock_scheduler: MockScheduler,
 ) -> None:
diff --git a/tests/test_slurm_run.py b/tests/test_slurm_run.py
index ec3658ad..fd45ac40 100644
--- a/tests/test_slurm_run.py
+++ b/tests/test_slurm_run.py
@@ -18,13 +18,13 @@
 from adaptive_scheduler.utils import _DATAFRAME_FORMATS


-@pytest.fixture()
+@pytest.fixture
 def extra_run_manager_kwargs() -> dict[str, Any]:
     """Fixture for creating extra run manager keyword arguments."""
     return {"kill_on_error": "GPU on fire", "loky_start_method": "fork"}


-@pytest.fixture()
+@pytest.fixture
 def extra_scheduler_kwargs() -> dict[str, Any]:
     """Fixture for creating extra scheduler keyword arguments."""
     return {"mpiexec_executable": "mpiexec"}
diff --git a/tests/test_utils_file_creation_progress.py b/tests/test_utils_file_creation_progress.py
index 9ba998cc..9332f077 100644
--- a/tests/test_utils_file_creation_progress.py
+++ b/tests/test_utils_file_creation_progress.py
@@ -69,7 +69,7 @@ def test_remove_completed_paths(tmp_path: Path) -> None:
     assert paths_dict == {"category1": {tmp_path / "file2"}, "category2": set()}


-@pytest.mark.asyncio()
+@pytest.mark.asyncio
 async def test_track_file_creation_progress(tmp_path: Path) -> None:
     """Test the track file creation progress function."""
     # Create test files