|
| 1 | +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. |
| 2 | +# SPDX-FileCopyrightText: Copyright (c) 2024 Arc Institute. All rights reserved. |
| 3 | +# SPDX-FileCopyrightText: Copyright (c) 2024 Michael Poli. All rights reserved. |
| 4 | +# SPDX-FileCopyrightText: Copyright (c) 2024 Stanford University. All rights reserved |
| 5 | +# SPDX-License-Identifier: LicenseRef-Apache2 |
| 6 | +# |
| 7 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
| 8 | +# you may not use this file except in compliance with the License. |
| 9 | +# You may obtain a copy of the License at |
| 10 | +# |
| 11 | +# http://www.apache.org/licenses/LICENSE-2.0 |
| 12 | +# |
| 13 | +# Unless required by applicable law or agreed to in writing, software |
| 14 | +# distributed under the License is distributed on an "AS IS" BASIS, |
| 15 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 16 | +# See the License for the specific language governing permissions and |
| 17 | +# limitations under the License. |
| 18 | +import pytest |
| 19 | + |
| 20 | +from bionemo.testing.subprocess_utils import run_command_in_subprocess |
| 21 | + |
| 22 | +from .common import small_training_cmd, small_training_finetune_cmd |
| 23 | + |
| 24 | + |
@pytest.mark.timeout(512)  # Optional: fail if the test takes too long.
@pytest.mark.slow
@pytest.mark.parametrize("with_peft", [True, False])
def test_train_evo2_finetune_runs_lora(tmp_path, with_peft: bool):
    """Run a tiny pretrain, then finetune from its checkpoint (optionally with LoRA).

    This test runs the `train_evo2` command with mock data in a temporary directory.
    It uses the temporary directory provided by pytest as the working directory.
    Each command is run in a subshell, and we assert that it returns an exit code of 0
    and that the expected run artifacts (checkpoints, TensorBoard logs) exist.

    Args:
        tmp_path: pytest-provided temporary working directory.
        with_peft: when True, finetune with LoRA (``--lora-finetune``) and also
            verify resuming from the saved LoRA checkpoint.
    """
    num_steps = 2
    # Note: The command assumes that `train_evo2` is in your PATH.
    command = small_training_cmd(tmp_path / "pretrain", max_steps=num_steps, val_check=num_steps)
    stdout_pretrain: str = run_command_in_subprocess(command=command, path=str(tmp_path))
    # A from-scratch pretrain must NOT log a checkpoint-restore message.
    assert "Restoring model weights from RestoreConfig(path='" not in stdout_pretrain

    # Validate pretrain artifacts and grab the single final checkpoint directory.
    pretrain_ckpt = _assert_run_artifacts(tmp_path / "pretrain", num_steps)

    if with_peft:
        result_dir = tmp_path / "lora_finetune"
        additional_args = "--lora-finetune"
    else:
        result_dir = tmp_path / "finetune"
        additional_args = ""

    command_finetune = small_training_finetune_cmd(
        result_dir,
        max_steps=num_steps,
        val_check=num_steps,
        prev_ckpt=pretrain_ckpt,
        create_tflops_callback=not with_peft,
        additional_args=additional_args,
    )
    stdout_finetune: str = run_command_in_subprocess(command=command_finetune, path=str(tmp_path))
    # Finetuning MUST restore weights from the pretrain checkpoint.
    assert "Restoring model weights from RestoreConfig(path='" in stdout_finetune

    # Validate finetune artifacts and grab its final checkpoint directory.
    finetune_ckpt = _assert_run_artifacts(result_dir, num_steps)

    # With LoRA, test resuming from a saved LoRA checkpoint
    if with_peft:
        resume_dir = tmp_path / "lora_finetune_resume"

        # Resume from LoRA checkpoint: base weights come from the pretrain
        # checkpoint, adapter weights from the finetune checkpoint.
        command_resume_finetune = small_training_finetune_cmd(
            resume_dir,
            max_steps=num_steps,
            val_check=num_steps,
            prev_ckpt=pretrain_ckpt,
            create_tflops_callback=False,
            additional_args=f"--lora-finetune --lora-checkpoint-path {finetune_ckpt}",
        )
        run_command_in_subprocess(command=command_resume_finetune, path=str(tmp_path))

        log_dir_resume = resume_dir / "evo2"
        checkpoints_dir_resume = log_dir_resume / "checkpoints"

        # Check if logs dir exists
        assert log_dir_resume.exists(), "Logs folder should exist."
        # Check if checkpoints dir exists
        assert checkpoints_dir_resume.exists(), "Checkpoints folder does not exist."


def _assert_run_artifacts(result_dir, num_steps: int):
    """Assert that a training run under ``result_dir`` produced the expected artifacts.

    Checks that the ``evo2`` log folder, its ``checkpoints`` folder (containing exactly
    one ``{num_steps}.0-last`` checkpoint), and TensorBoard event files under ``dev``
    all exist.

    Args:
        result_dir: root output directory passed to the training command.
        num_steps: ``max_steps`` used for the run; determines the checkpoint suffix.

    Returns:
        Path to the single matching final checkpoint directory.
    """
    log_dir = result_dir / "evo2"
    checkpoints_dir = log_dir / "checkpoints"
    tensorboard_dir = log_dir / "dev"

    # Check if logs dir exists
    assert log_dir.exists(), "Logs folder should exist."
    # Check if checkpoints dir exists
    assert checkpoints_dir.exists(), "Checkpoints folder does not exist."

    expected_checkpoint_suffix = f"{num_steps}.0-last"
    # Check if any subfolder ends with the expected suffix
    matching_subfolders = [
        p for p in checkpoints_dir.iterdir() if p.is_dir() and (expected_checkpoint_suffix in p.name)
    ]

    assert matching_subfolders, (
        f"No checkpoint subfolder ending with '{expected_checkpoint_suffix}' found in {checkpoints_dir}."
    )
    assert len(matching_subfolders) == 1, "Only one checkpoint subfolder should be found."

    # Check if directory with tensorboard logs exists
    assert tensorboard_dir.exists(), "TensorBoard logs folder does not exist."
    # Recursively search for files with tensorboard logger
    event_files = list(tensorboard_dir.rglob("events.out.tfevents*"))
    assert event_files, f"No TensorBoard event files found under {tensorboard_dir}"

    return matching_subfolders[0]
0 commit comments