Skip to content
Merged

fix ci #1344

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .azure-pipelines/scripts/ut/run_ut.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,11 @@ export TQDM_MININTERVAL=60
uv pip install pytest-cov pytest-html
uv pip install -r /auto-round/test/test_cpu/requirements.txt \
--extra-index-url https://download.pytorch.org/whl/cpu
uv pip install torch==2.8.0 torchvision --index-url https://download.pytorch.org/whl/cpu

# install latest gguf for ut test
cd ~ || exit 1
git clone -b master --quiet --single-branch https://github.com/ggml-org/llama.cpp.git && cd llama.cpp/gguf-py && uv pip install . sentencepiece
uv pip install --upgrade transformers

cd /auto-round && uv pip install .

Expand Down
2 changes: 1 addition & 1 deletion .azure-pipelines/scripts/ut/run_ut_cuda.sh
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ function run_unit_test() {
rm -rf .coverage* *.xml *.html

uv pip install -v git+https://github.com/casper-hansen/AutoAWQ.git --no-build-isolation
uv pip install -v git+https://github.com/ModelCloud/GPTQModel.git@v5.6.12 --no-build-isolation
uv pip install https://github.com/ModelCloud/GPTQModel/releases/download/v5.6.0/gptqmodel-5.6.0+cu126torch2.9-cp310-cp310-linux_x86_64.whl --no-build-isolation
uv pip install -r https://raw.githubusercontent.com/ModelCloud/GPTQModel/refs/heads/main/requirements.txt
CMAKE_ARGS="-DGGML_CUDA=on -DLLAVA_BUILD=off" uv pip install llama-cpp-python
uv pip install 'git+https://github.com/ggml-org/llama.cpp.git#subdirectory=gguf-py'
Expand Down
11 changes: 10 additions & 1 deletion auto_round/calib_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -456,8 +456,15 @@ def default_tokenizer_function(examples):


@register_dataset("mbpp")
@register_dataset("google-research-datasets/mbpp")
def get_mbpp_dataset(
tokenizer, seqlen, dataset_name="mbpp", split=None, seed=42, apply_chat_template=False, system_prompt=None
tokenizer,
seqlen,
dataset_name="google-research-datasets/mbpp",
split=None,
seed=42,
apply_chat_template=False,
system_prompt=None,
):
"""Returns a dataloader for the specified dataset and split.

Expand All @@ -474,6 +481,8 @@ def get_mbpp_dataset(
"""
from datasets import load_dataset

dataset_name = "google-research-datasets/mbpp"

tokenizer_function = get_tokenizer_function(
tokenizer, seqlen, apply_chat_template=apply_chat_template, system_prompt=system_prompt
)
Expand Down
25 changes: 25 additions & 0 deletions auto_round/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,3 +15,28 @@
from auto_round.utils.device import *
from auto_round.utils.common import *
from auto_round.utils.model import *

import transformers
from packaging.version import Version

# Guard flag so the load_dataset monkey-patch is applied at most once.
DATASET_PATCHED = False
# tmp patch for transformers v5.0: some legacy Hub dataset names moved under
# an org namespace, so rewrite old names on the fly before loading.
if Version(transformers.__version__) >= Version("5.0.0") and not DATASET_PATCHED:
    import datasets

    # Keep a handle to the unpatched loader so the wrapper can delegate to it.
    datasets.original_load_dataset = datasets.load_dataset

    def patch_load_dataset(*args, **kwargs):
        """Drop-in wrapper for ``datasets.load_dataset`` that rewrites legacy names.

        All arguments are forwarded unchanged, except that known legacy dataset
        names (e.g. ``openbookqa``) are substring-replaced with their
        org-qualified Hub names (``allenai/openbookqa``) wherever they appear:
        in the first positional argument, the ``path`` kwarg, and the ``name``
        kwarg. Already-namespaced values are left untouched.
        """
        for dataset_name, replace_name in [("openbookqa", "allenai/openbookqa")]:
            # Positional path: use substring replacement (consistent with the
            # kwarg branches below) instead of clobbering the whole argument,
            # and skip values that are already namespaced.
            if args and dataset_name in args[0] and replace_name not in args[0]:
                args = (args[0].replace(dataset_name, replace_name),) + args[1:]
            if kwargs.get("path") is not None:
                if dataset_name in kwargs["path"] and replace_name not in kwargs["path"]:
                    kwargs["path"] = kwargs["path"].replace(dataset_name, replace_name)
            if kwargs.get("name") is not None:
                if dataset_name in kwargs["name"] and replace_name not in kwargs["name"]:
                    kwargs["name"] = kwargs["name"].replace(dataset_name, replace_name)
        return datasets.original_load_dataset(*args, **kwargs)

    datasets.load_dataset = patch_load_dataset
    DATASET_PATCHED = True
1 change: 1 addition & 0 deletions test/fixtures.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import os
import shutil

import datasets
import pytest
import torch
import transformers
Expand Down
Loading