Skip to content

Commit b1a205b

Browse files
committed
remove use of methods deprecated in transformers v5 from examples; bump CI to use transformers v5
1 parent 185d3c0 commit b1a205b

File tree

4 files changed

+33
-23
lines changed

4 files changed

+33
-23
lines changed

requirements/ci/requirements-oldest.txt

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -86,8 +86,10 @@ coverage==6.4
8686
# finetuning-scheduler (pyproject.toml:dev)
8787
# finetuning-scheduler (pyproject.toml:test)
8888
# nbval
89-
cryptography==46.0.3 ; sys_platform == 'linux'
90-
# via secretstorage
89+
cryptography==46.0.3
90+
# via
91+
# google-auth
92+
# secretstorage
9193
cycler==0.12.1
9294
# via matplotlib
9395
databricks-sdk==0.80.0
@@ -152,7 +154,7 @@ gitdb==4.0.12
152154
# via gitpython
153155
gitpython==3.1.46
154156
# via mlflow-skinny
155-
google-auth==2.47.0
157+
google-auth==2.48.0
156158
# via databricks-sdk
157159
graphene==3.4.3
158160
# via mlflow
@@ -300,7 +302,7 @@ more-itertools==10.8.0
300302
# via
301303
# jaraco-classes
302304
# jaraco-functools
303-
multidict==6.7.0
305+
multidict==6.7.1
304306
# via
305307
# aiohttp
306308
# yarl
@@ -565,7 +567,7 @@ send2trash==2.1.0
565567
# via notebook
566568
sentencepiece==0.2.0
567569
# via finetuning-scheduler (pyproject.toml)
568-
setuptools==80.10.1
570+
setuptools==80.10.2
569571
# via
570572
# ipython
571573
# lightning-utilities
@@ -696,7 +698,7 @@ virtualenv==20.36.1
696698
# via pre-commit
697699
waitress==3.0.2 ; sys_platform == 'win32'
698700
# via mlflow
699-
wcwidth==0.3.1
701+
wcwidth==0.4.0
700702
# via prompt-toolkit
701703
webencodings==0.5.1
702704
# via bleach

requirements/ci/requirements.txt

Lines changed: 23 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
# This file was autogenerated by uv via the following command:
2-
# uv pip compile pyproject.toml --extra all --group dev --group test --output-file /home/speediedan/repos/finetuning-scheduler/requirements/ci/requirements.txt --no-strip-extras --resolution highest --universal --python-version 3.10 --prerelease=if-necessary-or-explicit --override /tmp/tmp.cMRqtRMZwC --index-strategy unsafe-best-match --no-emit-package torch
2+
# uv pip compile pyproject.toml --extra all --group dev --group test --output-file /home/speediedan/repos/finetuning-scheduler/requirements/ci/requirements.txt --no-strip-extras --resolution highest --universal --python-version 3.10 --prerelease=if-necessary-or-explicit --override /tmp/tmp.x0ulCdHmJD --index-strategy unsafe-best-match --no-emit-package torch
33
aiohappyeyeballs==2.6.1
44
# via aiohttp
55
aiohttp==3.13.3
@@ -50,7 +50,7 @@ bleach[css]==6.3.0
5050
# via nbconvert
5151
blinker==1.9.0
5252
# via flask
53-
cachetools==6.2.4
53+
cachetools==6.2.5
5454
# via
5555
# mlflow-skinny
5656
# mlflow-tracing
@@ -72,6 +72,7 @@ click==8.3.1
7272
# via
7373
# flask
7474
# mlflow-skinny
75+
# typer-slim
7576
# uvicorn
7677
cloudpickle==3.1.2
7778
# via mlflow-skinny
@@ -89,13 +90,14 @@ contourpy==1.3.2 ; python_full_version < '3.11'
8990
# via matplotlib
9091
contourpy==1.3.3 ; python_full_version >= '3.11'
9192
# via matplotlib
92-
coverage==7.13.1
93+
coverage==7.13.2
9394
# via
9495
# finetuning-scheduler (pyproject.toml:dev)
9596
# finetuning-scheduler (pyproject.toml:test)
9697
# nbval
9798
cryptography==46.0.3
9899
# via
100+
# google-auth
99101
# mlflow
100102
# secretstorage
101103
cycler==0.12.1
@@ -173,7 +175,7 @@ gitdb==4.0.12
173175
# via gitpython
174176
gitpython==3.1.46
175177
# via mlflow-skinny
176-
google-auth==2.47.0
178+
google-auth==2.48.0
177179
# via databricks-sdk
178180
graphene==3.4.3
179181
# via mlflow
@@ -191,17 +193,18 @@ h11==0.16.0
191193
# via
192194
# httpcore
193195
# uvicorn
194-
hf-xet==1.2.0 ; platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'
196+
hf-xet==1.2.0 ; platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'
195197
# via huggingface-hub
196198
httpcore==1.0.9
197199
# via httpx
198200
httpx==0.28.1
199201
# via
200202
# datasets
203+
# huggingface-hub
201204
# jupyterlab
202205
huey==2.6.0
203206
# via mlflow
204-
huggingface-hub==0.36.0
207+
huggingface-hub==1.3.4
205208
# via
206209
# datasets
207210
# evaluate
@@ -327,7 +330,7 @@ jupyterlab-server==2.28.0
327330
# notebook
328331
jupyterlab-widgets==3.0.16
329332
# via ipywidgets
330-
jupytext==1.19.0
333+
jupytext==1.19.1
331334
# via finetuning-scheduler (pyproject.toml)
332335
keyring==25.7.0 ; platform_machine != 'ppc64le' and platform_machine != 's390x'
333336
# via twine
@@ -380,7 +383,7 @@ more-itertools==10.8.0 ; platform_machine != 'ppc64le' and platform_machine != '
380383
# via
381384
# jaraco-classes
382385
# jaraco-functools
383-
multidict==6.7.0
386+
multidict==6.7.1
384387
# via
385388
# aiohttp
386389
# yarl
@@ -414,7 +417,7 @@ nodeenv==1.10.0
414417
# via
415418
# pre-commit
416419
# pyright
417-
notebook==7.5.2
420+
notebook==7.5.3
418421
# via finetuning-scheduler (pyproject.toml)
419422
notebook-shim==0.2.4
420423
# via
@@ -644,12 +647,10 @@ requests==2.32.5
644647
# datasets
645648
# docker
646649
# evaluate
647-
# huggingface-hub
648650
# id
649651
# jupyterlab-server
650652
# mlflow-skinny
651653
# requests-toolbelt
652-
# transformers
653654
# twine
654655
requests-toolbelt==1.0.0
655656
# via twine
@@ -665,7 +666,7 @@ rfc3986-validator==0.1.1
665666
# jupyter-events
666667
rfc3987-syntax==1.1.0
667668
# via jsonschema
668-
rich==14.2.0
669+
rich==14.3.1
669670
# via
670671
# finetuning-scheduler (pyproject.toml)
671672
# twine
@@ -699,11 +700,13 @@ send2trash==2.1.0
699700
# via jupyter-server
700701
sentencepiece==0.2.1
701702
# via finetuning-scheduler (pyproject.toml)
702-
setuptools==80.10.1
703+
setuptools==80.10.2
703704
# via
704705
# jupyterlab
705706
# lightning-utilities
706707
# torch
708+
shellingham==1.5.4
709+
# via huggingface-hub
707710
six==1.17.0
708711
# via
709712
# python-dateutil
@@ -777,12 +780,16 @@ traitlets==5.14.3
777780
# nbclient
778781
# nbconvert
779782
# nbformat
780-
transformers==4.57.6
783+
transformers==5.0.0
781784
# via finetuning-scheduler (pyproject.toml)
782785
twine==6.2.0
783786
# via
784787
# finetuning-scheduler (pyproject.toml:dev)
785788
# finetuning-scheduler (pyproject.toml:test)
789+
typer-slim==0.21.1
790+
# via
791+
# huggingface-hub
792+
# transformers
786793
typeshed-client==2.8.2
787794
# via jsonargparse
788795
typing-extensions==4.15.0
@@ -814,6 +821,7 @@ typing-extensions==4.15.0
814821
# sqlalchemy
815822
# starlette
816823
# torch
824+
# typer-slim
817825
# typeshed-client
818826
# typing-inspection
819827
# uvicorn
@@ -837,7 +845,7 @@ virtualenv==20.36.1
837845
# via pre-commit
838846
waitress==3.0.2 ; sys_platform == 'win32'
839847
# via mlflow
840-
wcwidth==0.3.1
848+
wcwidth==0.4.0
841849
# via prompt-toolkit
842850
webcolors==25.10.0
843851
# via jsonschema

src/fts_examples/fts_superglue.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -173,7 +173,7 @@ def _convert_to_features(self, example_batch: LazyDict) -> BatchEncoding:
173173
"""
174174
text_pairs = list(zip(example_batch[self.text_fields[0]], example_batch[self.text_fields[1]]))
175175
# Tokenize the text/text pairs
176-
features = self.tokenizer.batch_encode_plus(
176+
features = self.tokenizer(
177177
text_pairs, max_length=self.hparams.max_seq_length, padding="longest", truncation=True
178178
)
179179
# Rename label to labels to make it easier to pass to model forward

src/fts_examples/ipynb_src/fts_superglue_nb.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -303,7 +303,7 @@ def _convert_to_features(self, example_batch: datasets.arrow_dataset.LazyDict) -
303303
"""
304304
text_pairs = list(zip(example_batch[self.text_fields[0]], example_batch[self.text_fields[1]]))
305305
# Tokenize the text/text pairs
306-
features = self.tokenizer.batch_encode_plus(
306+
features = self.tokenizer(
307307
text_pairs, max_length=self.hparams.max_seq_length, padding="longest", truncation=True
308308
)
309309
# Rename label to labels to make it easier to pass to model forward

0 commit comments

Comments (0)