8 changes: 8 additions & 0 deletions areal/api/cli_args.py
@@ -405,6 +405,14 @@ class FSDPEngineConfig:
},
)

shard_vision_across_sp: bool = field(
default=False,
metadata={
"help": "Shard vision encoder across SP ranks by image. "
"Only effective when context_parallel_size > 1."
},
)


@dataclass
class ArchonEngineConfig:
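For orientation, here is a minimal sketch of enabling the new option in code. `FSDPEngineConfig` and `shard_vision_across_sp` come from this diff; assuming every other field can be left at its default is an illustration-only assumption.

```python
# Minimal sketch (assumption: all other FSDPEngineConfig fields keep defaults).
from areal.api.cli_args import FSDPEngineConfig

fsdp_cfg = FSDPEngineConfig(
    # New flag from this PR; per its help text, it is a no-op
    # unless context_parallel_size > 1.
    shard_vision_across_sp=True,
)
```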
1 change: 1 addition & 0 deletions areal/engine/fsdp_engine.py
@@ -272,6 +272,7 @@ def initialize(self, addr: str | None, ft_spec: FinetuneSpec, *args, **kwargs):
apply_monkey_patch(
model=self.model,
ulysses_sp_size=self.parallel_helper.sp_size,
shard_vision_across_sp=self.config.fsdp.shard_vision_across_sp,
)
# Monkey patch: replace attention's forward() with tree attention.
patch_fsdp_for_tree_training(enable=self.enable_tree_training)
8 changes: 8 additions & 0 deletions areal/models/transformers/ulyssess_patch.py
@@ -147,6 +147,7 @@ def ulysses_wrapped_decoder_forward(self, *args, **kwargs):
def apply_monkey_patch(
model: PreTrainedModel,
ulysses_sp_size: int = 1,
shard_vision_across_sp: bool = False,
):
try:
num_attention_heads, num_key_value_heads = (
@@ -230,6 +231,13 @@ def apply_monkey_patch(

patch_vlm_for_ulysses_input_slicing(model_class)
logger.info(f"Patched {model_class_name}.forward")

if shard_vision_across_sp:
from areal.models.transformers.vision_sp_shard import (
apply_vision_sp_shard_patch,
)

apply_vision_sp_shard_patch()
else:
from transformers.integrations import flash_attention

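The diff does not show `vision_sp_shard.py` itself, so the body of `apply_vision_sp_shard_patch` is not visible here. As a rough illustration of the technique the flag names (sharding the vision encoder across SP ranks by image), the hypothetical sketch below assigns images round-robin to SP ranks, encodes only the local subset on each rank, and all-gathers the embeddings. Everything in it beyond that concept is an assumption, not the PR's implementation.

```python
# Hypothetical illustration of "shard vision encoder across SP ranks by image".
# NOT the PR's code (vision_sp_shard.py is not shown in this diff). Assumes the
# image count divides evenly by the SP world size, and ignores the
# differentiable all-gather a real training path would need.
import torch
import torch.distributed as dist


def encode_images_sharded(pixel_values: torch.Tensor,
                          vision_encoder,
                          sp_group: dist.ProcessGroup) -> torch.Tensor:
    sp_rank = dist.get_rank(group=sp_group)
    sp_size = dist.get_world_size(group=sp_group)

    # Round-robin: rank r encodes images r, r + sp_size, r + 2 * sp_size, ...
    local_embeds = vision_encoder(pixel_values[sp_rank::sp_size])

    # Every rank needs all image embeddings to build the LM inputs.
    gathered = [torch.empty_like(local_embeds) for _ in range(sp_size)]
    dist.all_gather(gathered, local_embeds, group=sp_group)

    # Interleave the shards back into the original image order.
    return torch.stack(gathered, dim=1).flatten(0, 1)
```

Under this reading, each rank runs the vision tower over only 1/sp_size of the images instead of duplicating the full encoder pass, which is consistent with the help text's note that the option only takes effect when context_parallel_size > 1.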