-
Notifications
You must be signed in to change notification settings - Fork 231
transformers v5.5 #1684
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
transformers v5.5 #1684
Changes from 27 commits
e6df5a0
4fcd786
9b439b4
88d4f1a
c31a88b
da7c410
5ca59b8
661f0a5
b52550f
c5f9c47
ee941ff
4a77bce
06187d0
9857954
95a6efd
dbc13a9
fd94a59
43bd816
31ffbb4
8ae40d3
6c05f54
c09aab1
620f7a5
24982a5
a80912e
1652bc8
2699f22
d1a61da
dbdf3af
fd8d155
cef3755
888bfb9
60582f8
ad5aea6
f9788e3
ea462e4
9acdc49
588f89e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Large diffs are not rendered by default.
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -28,8 +28,8 @@ | |
|
|
||
| INSTALL_REQUIRE = [ | ||
| "torch>=2.1", | ||
| "optimum-onnx@git+https://github.com/huggingface/optimum-onnx.git@transformers-v5", | ||
| "transformers>=4.45,<5.1", | ||
| "optimum-onnx@git+https://github.com/huggingface/optimum-onnx.git@xadupre/transformers5", | ||
| "transformers>=4.57,<5.5", | ||
|
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Setting the minimum transformers version to v4.57; will replicate everywhere once validated. cc @rkazants |
||
| "setuptools", | ||
| "huggingface-hub>=0.23.2,<2.0", | ||
| "nncf>=2.19.0", | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -97,7 +97,7 @@ | |
| TemporaryDirectory, | ||
| ) | ||
| from optimum.intel.pipelines import pipeline as optimum_pipeline | ||
| from optimum.intel.utils.import_utils import _langchain_hf_available, is_transformers_version | ||
| from optimum.intel.utils.import_utils import _langchain_hf_available, is_datasets_version, is_transformers_version | ||
| from optimum.intel.utils.modeling_utils import _find_files_matching_pattern | ||
| from optimum.utils import ( | ||
| DIFFUSION_MODEL_TEXT_ENCODER_2_SUBFOLDER, | ||
|
|
@@ -931,8 +931,8 @@ def test_pipeline(self, model_arch): | |
| @pytest.mark.run_slow | ||
| @slow | ||
| @pytest.mark.skipif( | ||
| is_transformers_version(">=", "5.3"), | ||
| reason="requires transformers < v5.3 since question-answering pipeline is deprecated in v5.3", | ||
| is_transformers_version(">=", "5.3") or is_datasets_version("<", "4"), | ||
| reason="requires datasets >= 4 or transformers < v5.3 since question-answering pipeline is deprecated in v5.3", | ||
| ) | ||
| def test_metric(self): | ||
| model_id = "distilbert-base-cased-distilled-squad" | ||
|
|
@@ -1113,7 +1113,6 @@ class OVModelForMaskedLMIntegrationTest(unittest.TestCase): | |
| "albert", | ||
| "bert", | ||
| "camembert", | ||
| "convbert", | ||
| "deberta", | ||
| "deberta-v2", | ||
| "distilbert", | ||
|
|
@@ -1131,13 +1130,16 @@ class OVModelForMaskedLMIntegrationTest(unittest.TestCase): | |
| ) | ||
|
|
||
| # accuracy issue, need additional investigation | ||
| if is_transformers_version("<", "4.51.0"): | ||
| if is_transformers_version("<", "4.51"): | ||
| SUPPORTED_ARCHITECTURES += ("nystromformer",) | ||
|
|
||
| # TODO: add fix for v5 and update MAX_TRANSFORMERS_VERSION accordingly | ||
| if is_transformers_version("<", "5"): | ||
| SUPPORTED_ARCHITECTURES += ("data2vec-text", "flaubert", "xlm") | ||
|
|
||
| if is_transformers_version("!=", "5.2"): | ||
| SUPPORTED_ARCHITECTURES += ("convbert",) | ||
|
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. failing in v5.2 https://github.com/huggingface/transformers/blob/v5.2.0/src/transformers/modeling_utils.py#L2315 fixed in v5.3 since huggingface/transformers@a64996e |
||
|
|
||
| @parameterized.expand(SUPPORTED_ARCHITECTURES) | ||
| def test_compare_to_transformers(self, model_arch): | ||
| model_id = MODEL_NAMES[model_arch] | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Before v5.2, `attention_mask` was created in `generate` by calling `_prepare_attention_mask_for_generation` (https://github.com/huggingface/transformers/blob/v5.1.0/src/transformers/generation/utils.py#L2530). This is no longer the case for encoder-decoder models since v5.2 (https://github.com/huggingface/transformers/blob/v5.2.0/src/transformers/generation/utils.py#L2555).