Update
[ghstack-poisoned]
wconstab committed May 18, 2024
1 parent 513dd94 commit 4a4f642
Showing 2 changed files with 17 additions and 2 deletions.
15 changes: 15 additions & 0 deletions test_runner.py
@@ -40,6 +40,21 @@ def build_test_list(args):
     """
     integration_tests_flavors = defaultdict(list)
     integration_tests_flavors["debug_model.toml"] = [
+        OverrideDefinitions(
+            [
+                [
+                    "--checkpoint.enable_checkpoint",
+                    f"--job.dump_folder {args.output_dir}/pp_1f1b_3stage/",
+                    "--experimental.pipeline_parallel_degree 3",
+                    "--experimental.pipeline_parallel_split_points layers.1, layers.2",
+                    "--experimental.pipeline_parallel_schedule 1f1b",
+                    "--training.data_parallel_degree 1",
+                ],
+            ],
+            "PP 1D test 1f1b with 3 PP stages",
+            requires_seed_checkpoint=True,
+            ngpu=3,
+        ),
         OverrideDefinitions(
             [
                 [
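The new test asks for three pipeline stages (ngpu=3) and splits at layers.1 and layers.2, which is also why the debugmodel config in the next file grows from 2 to 3 layers. A minimal sketch of that partitioning, assuming each split point begins a new stage (hypothetical helper, not torchtitan's actual splitting code):

# Hypothetical sketch: how two split points carve an ordered list of layer
# modules into three pipeline stages. Names follow the "layers.N" convention
# used in the test flags above.
def split_into_stages(layer_names, split_points):
    stages, current = [], []
    for name in layer_names:
        # A split point marks the first module of a new stage.
        if name in split_points and current:
            stages.append(current)
            current = []
        current.append(name)
    stages.append(current)
    return stages

layers = [f"layers.{i}" for i in range(3)]  # debugmodel now has n_layers=3
print(split_into_stages(layers, {"layers.1", "layers.2"}))
# -> [['layers.0'], ['layers.1'], ['layers.2']], one layer per stage for ngpu=3

With the previous 2-layer debugmodel, the layers.2 split point would leave nothing for the third stage.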
4 changes: 2 additions & 2 deletions torchtitan/models/llama/__init__.py
@@ -12,7 +12,7 @@
 __all__ = ["Transformer"]
 
 llama2_configs = {
-    "debugmodel": ModelArgs(dim=256, n_layers=2, n_heads=16),
+    "debugmodel": ModelArgs(dim=256, n_layers=3, n_heads=16),
     "271M": ModelArgs(dim=1024, n_layers=16, n_heads=8),
     "1B": ModelArgs(dim=2048, n_layers=18, n_heads=16),
     "7B": ModelArgs(dim=4096, n_layers=32, n_heads=32),
@@ -29,7 +29,7 @@
 }
 
 llama3_configs = {
-    "debugmodel": ModelArgs(dim=256, n_layers=2, n_heads=16, rope_theta=500000),
+    "debugmodel": ModelArgs(dim=256, n_layers=3, n_heads=16, rope_theta=500000),
     "8B": ModelArgs(
         dim=4096,
         n_layers=32,
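A quick sanity check, assuming a torchtitan checkout is importable and that ModelArgs exposes its constructor arguments as attributes, that both bumped debug configs now have at least as many layers as the 3-stage test needs:

# Sanity check (assumes torchtitan is on PYTHONPATH).
from torchtitan.models.llama import llama2_configs, llama3_configs

pp_degree = 3  # matches --experimental.pipeline_parallel_degree 3 in the new test
for family, configs in (("llama2", llama2_configs), ("llama3", llama3_configs)):
    n_layers = configs["debugmodel"].n_layers
    assert n_layers >= pp_degree, f"{family} debugmodel has only {n_layers} layers"
    print(f"{family} debugmodel: {n_layers} layers, enough for {pp_degree} PP stages")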
