
Commit 83c655a

remove unused code

1 parent 2f0ddb6 commit 83c655a

4 files changed: +1 -87 lines changed


src/optimum/rbln/transformers/models/gemma3/modeling_gemma3.py

Lines changed: 0 additions & 20 deletions
@@ -403,26 +403,6 @@ def _create_embedding_layer(self):
         )
         return embed_tokens
 
-    @classmethod
-    def _update_sliding_window_config(cls, model_config: PretrainedConfig, rbln_config: RBLNGemma3ForCausalLMConfig):
-        sliding_window = getattr(model_config, "sliding_window", None)
-        sliding_window_pattern = getattr(model_config, "sliding_window_pattern", None)
-        if sliding_window_pattern is None:
-            if hasattr(model_config, "layer_types"):
-                first_full_attention_index = model_config.layer_types.index("full_attention")
-                sliding_window_pattern = first_full_attention_index + 1
-            else:
-                raise ValueError("Cannot determine sliding_window_pattern from model_config")
-
-        if sliding_window_pattern <= model_config.num_hidden_layers:
-            rbln_config.cache_impl = "hybrid"
-            rbln_config.sliding_window = sliding_window
-            rbln_config.sliding_window_layers = [
-                i for i in range(model_config.num_hidden_layers) if (i + 1) % sliding_window_pattern > 0
-            ]
-
-        return rbln_config
-
     @classmethod
     def _update_submodule_config(
         cls,

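For reference, the removed Gemma3 override derived a sliding_window_pattern (every N-th layer uses full attention, the rest use sliding-window attention) and switched the cache to hybrid mode. A minimal standalone sketch of that layer-selection arithmetic follows; the 48 layers and pattern of 6 are illustrative assumptions, not values from a real Gemma3 config.

# Sketch of the layer-selection logic from the removed Gemma3 override.
# num_hidden_layers and sliding_window_pattern are illustrative assumptions.
num_hidden_layers = 48
sliding_window_pattern = 6  # every 6th layer (1-based) uses full attention

# A layer is a sliding-window layer unless its 1-based index is a multiple of the pattern.
sliding_window_layers = [
    i for i in range(num_hidden_layers) if (i + 1) % sliding_window_pattern > 0
]

print(sliding_window_layers[:7])  # [0, 1, 2, 3, 4, 6, 7] -- layers 5, 11, 17, ... keep full attention

With these assumed values, 40 of the 48 layers would use the sliding-window cache and the remaining 8 would use full attention, which is why the override set cache_impl to "hybrid".
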
src/optimum/rbln/transformers/models/mistral/modeling_mistral.py

Lines changed: 0 additions & 22 deletions
@@ -12,13 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from transformers import PretrainedConfig
 
 from ....utils import logging
 from ...models.decoderonly import (
     RBLNDecoderOnlyModel,
     RBLNDecoderOnlyModelForCausalLM,
-    RBLNDecoderOnlyModelForCausalLMConfig,
 )
 from .mistral_architecture import MistralWrapper
 

@@ -85,16 +83,6 @@ class RBLNMistralForCausalLM(RBLNDecoderOnlyModelForCausalLM):
 
     _decoder_wrapper_cls = MistralWrapper
 
-    @classmethod
-    def _update_sliding_window_config(
-        cls, model_config: PretrainedConfig, rbln_config: RBLNDecoderOnlyModelForCausalLMConfig
-    ):
-        rbln_config.cache_impl = "sliding_window"
-        rbln_config.sliding_window = model_config.sliding_window
-        rbln_config.sliding_window_layers = list(range(model_config.num_hidden_layers))
-
-        return rbln_config
-
 
 class RBLNMistralModel(RBLNDecoderOnlyModel):
     """

@@ -103,13 +91,3 @@ class RBLNMistralModel(RBLNDecoderOnlyModel):
     """
 
     _decoder_wrapper_cls = MistralWrapper
-
-    @classmethod
-    def _update_sliding_window_config(
-        cls, model_config: PretrainedConfig, rbln_config: RBLNDecoderOnlyModelForCausalLMConfig
-    ):
-        rbln_config.cache_impl = "sliding_window"
-        rbln_config.sliding_window = model_config.sliding_window
-        rbln_config.sliding_window_layers = list(range(model_config.num_hidden_layers))
-
-        return rbln_config

src/optimum/rbln/transformers/models/qwen2/modeling_qwen2.py

Lines changed: 0 additions & 28 deletions
@@ -12,13 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from transformers import PretrainedConfig
 
 from ....utils import logging
 from ...models.decoderonly import (
     RBLNDecoderOnlyModel,
     RBLNDecoderOnlyModelForCausalLM,
-    RBLNDecoderOnlyModelForCausalLMConfig,
 )
 from .qwen2_architecture import QWEN2Wrapper
 

@@ -87,19 +85,6 @@ class RBLNQwen2ForCausalLM(RBLNDecoderOnlyModelForCausalLM):
 
     _decoder_wrapper_cls = QWEN2Wrapper
 
-    @classmethod
-    def _update_sliding_window_config(
-        cls, model_config: PretrainedConfig, rbln_config: RBLNDecoderOnlyModelForCausalLMConfig
-    ):
-        # https://github.com/huggingface/transformers/issues/35896
-        # There seems to be a bug in transformers(v4.52.4). Therefore, similar to when attn_implementation is eager,
-        # we set all layers to use sliding window in this version. This should be updated once the bug is fixed.
-
-        rbln_config.cache_impl = "sliding_window"
-        rbln_config.sliding_window = model_config.sliding_window
-        rbln_config.sliding_window_layers = list(range(model_config.num_hidden_layers))
-        return rbln_config
-
 
 class RBLNQwen2Model(RBLNDecoderOnlyModel):
     """

@@ -108,16 +93,3 @@ class RBLNQwen2Model(RBLNDecoderOnlyModel):
     """
 
     _decoder_wrapper_cls = QWEN2Wrapper
-
-    @classmethod
-    def _update_sliding_window_config(
-        cls, model_config: PretrainedConfig, rbln_config: RBLNDecoderOnlyModelForCausalLMConfig
-    ):
-        # https://github.com/huggingface/transformers/issues/35896
-        # There seems to be a bug in transformers(v4.52.4). Therefore, similar to when attn_implementation is eager,
-        # we set all layers to use sliding window in this version. This should be updated once the bug is fixed.
-
-        rbln_config.cache_impl = "sliding_window"
-        rbln_config.sliding_window = model_config.sliding_window
-        rbln_config.sliding_window_layers = list(range(model_config.num_hidden_layers))
-        return rbln_config

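As the in-line comment in the removed Qwen2 (and, below, Qwen3) override notes, it worked around https://github.com/huggingface/transformers/issues/35896 by treating every decoder layer as a sliding-window layer, mirroring eager attention. A small sketch of the values that override produced; the layer count and window size are assumptions for illustration, not a real Qwen2 config.

# Values the removed Qwen2/Qwen3 override set on the RBLN config (illustrative only).
num_hidden_layers = 28  # assumption for illustration
sliding_window = 4096   # assumption for illustration

cache_impl = "sliding_window"
sliding_window_layers = list(range(num_hidden_layers))  # every layer: [0, 1, ..., 27]
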
src/optimum/rbln/transformers/models/qwen3/modeling_qwen3.py

Lines changed: 1 addition & 17 deletions
@@ -14,21 +14,18 @@
 
 from typing import TYPE_CHECKING
 
-from transformers import PretrainedConfig
-
 from ....utils import logging
 from ...models.decoderonly import (
     RBLNDecoderOnlyModel,
     RBLNDecoderOnlyModelForCausalLM,
-    RBLNDecoderOnlyModelForCausalLMConfig,
 )
 from .qwen3_architecture import Qwen3Wrapper
 
 
 logger = logging.get_logger(__name__)
 
 if TYPE_CHECKING:
-    from transformers import PretrainedConfig
+    pass
 
 
 class RBLNQwen3ForCausalLM(RBLNDecoderOnlyModelForCausalLM):

@@ -84,19 +81,6 @@ class RBLNQwen3ForCausalLM(RBLNDecoderOnlyModelForCausalLM):
 
     _decoder_wrapper_cls = Qwen3Wrapper
 
-    @classmethod
-    def _update_sliding_window_config(
-        cls, model_config: PretrainedConfig, rbln_config: RBLNDecoderOnlyModelForCausalLMConfig
-    ):
-        # https://github.com/huggingface/transformers/issues/35896
-        # There seems to be a bug in transformers(v4.52.4). Therefore, similar to when attn_implementation is eager,
-        # we set all layers to use sliding window in this version. This should be updated once the bug is fixed.
-
-        rbln_config.cache_impl = "sliding_window"
-        rbln_config.sliding_window = model_config.sliding_window
-        rbln_config.sliding_window_layers = list(range(model_config.num_hidden_layers))
-        return rbln_config
-
     def forward(self, *args, **kwargs):
         kwargs["return_dict"] = True
         return super().forward(*args, **kwargs)

0 commit comments
