1 change: 0 additions & 1 deletion verl/workers/config/actor.py
@@ -282,7 +282,6 @@ class FSDPActorConfig(ActorConfig):
     entropy_checkpointing: bool = False
     fsdp_config: FSDPEngineConfig = field(default_factory=FSDPEngineConfig)
     use_remove_padding: bool = False
-    profiler: ProfilerConfig = field(default_factory=ProfilerConfig)
     use_rollout_log_probs: bool = False

     def __post_init__(self):
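For context: the deleted line re-declared a field that the surrounding dataclass hierarchy presumably already provides. In Python dataclasses, re-declaring an inherited field with an identical default merely shadows the parent's definition without changing behavior, so the deletion is a no-op cleanup. A minimal sketch of the pattern (class names and fields here are illustrative, not verl's actual hierarchy):

from dataclasses import dataclass, field

@dataclass
class ProfilerConfig:
    enable: bool = False  # hypothetical field, for illustration only

@dataclass
class ParentConfig:
    # the parent already supplies a fresh ProfilerConfig per instance
    profiler: ProfilerConfig = field(default_factory=ProfilerConfig)

@dataclass
class ChildConfig(ParentConfig):
    # redundant re-declaration: shadows the inherited field with an
    # identical default, so removing it preserves behavior
    profiler: ProfilerConfig = field(default_factory=ProfilerConfig)

# both classes produce the same default
assert ParentConfig().profiler == ChildConfig().profiler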
3 changes: 1 addition & 2 deletions verl/workers/config/model.py
@@ -88,7 +88,6 @@ class HFModelConfig(BaseConfig):
     # path to pre-trained LoRA adapter to load for continued training
     lora_adapter_path: Optional[str] = None
     use_liger: bool = False
-    lora: dict = field(default_factory=dict)

     use_fused_kernels: bool = False
     fused_kernel_options: dict = field(default_factory=dict)
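The surviving lora_adapter_path field points at a pre-trained LoRA adapter to resume training from. verl's own loading code is not shown in this hunk; as a generic sketch of the idea using peft (model id and adapter path are placeholders, not verl's actual loading logic):

from peft import PeftModel
from transformers import AutoModelForCausalLM

base_model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2.5-0.5B")  # placeholder base model
# is_trainable=True keeps the adapter weights trainable for continued training
model = PeftModel.from_pretrained(base_model, "/path/to/lora_adapter", is_trainable=True)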
@@ -122,7 +121,7 @@ def __post_init__(self):
             self.local_hf_config_path, trust_remote_code=self.trust_remote_code
         )

-        # constuct hf_config
+        # construct hf_config
         attn_implementation = self.override_config.get("attn_implementation", "flash_attention_2")
         self.hf_config = AutoConfig.from_pretrained(
             self.local_hf_config_path, trust_remote_code=self.trust_remote_code, attn_implementation=attn_implementation
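The context lines around the typo fix show how HFModelConfig resolves the attention backend: it reads attn_implementation from override_config, falls back to flash_attention_2, and forwards it to AutoConfig.from_pretrained, as the hunk above does. A standalone sketch of that pattern, assuming a recent transformers release (the model id is a placeholder):

from transformers import AutoConfig

override_config = {}  # user overrides; an empty dict falls back to the default below
attn_implementation = override_config.get("attn_implementation", "flash_attention_2")

# recent transformers releases accept attn_implementation here and record it
# on the returned config object
hf_config = AutoConfig.from_pretrained(
    "Qwen/Qwen2.5-0.5B",  # placeholder model id
    trust_remote_code=False,
    attn_implementation=attn_implementation,
)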