Skip to content

Commit 8603683

Browse files
authored
[WWB]: move diffusers imports closer to usage (openvinotoolkit#2046)
CVS-165875: in some cases the diffusers import may break loading of transformers models, even though diffusers should not be used at all in those cases. Moved the diffusers imports closer to their usage to avoid possible issues.
1 parent 4560c7d commit 8603683

File tree

3 files changed

+11
-4
lines changed

3 files changed

+11
-4
lines changed

tools/who_what_benchmark/whowhatbench/model_loaders.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
import torch
44

55
from transformers import AutoConfig, AutoModelForCausalLM, AutoModel, AutoModelForVision2Seq, AutoTokenizer
6-
from diffusers import DiffusionPipeline, AutoPipelineForImage2Image, AutoPipelineForInpainting
76

87
from .utils import mock_torch_cuda_is_available
98

@@ -27,6 +26,7 @@ def __init__(self, model, model_dir, model_type):
2726
except Exception:
2827
self.config = AutoConfig.from_pretrained(model_dir)
2928
elif model_type == "text-to-image":
29+
from diffusers import DiffusionPipeline
3030
self.config = DiffusionPipeline.load_config(
3131
model_dir, trust_remote_code=True)
3232

@@ -175,6 +175,7 @@ def load_text2image_model(
175175
logger.info("Using OpenvINO GenAI API")
176176
model = load_text2image_genai_pipeline(model_id, device, ov_config)
177177
elif use_hf:
178+
from diffusers import DiffusionPipeline
178179
logger.info("Using HF Transformers API")
179180
model = DiffusionPipeline.from_pretrained(
180181
model_id, trust_remote_code=True)
@@ -287,6 +288,7 @@ def load_imagetext2image_model(
287288
model_id, device="CPU", ov_config=None, use_hf=False, use_genai=False
288289
):
289290
if use_hf:
291+
from diffusers import AutoPipelineForImage2Image
290292
logger.info("Using HF Transformers API")
291293
model = AutoPipelineForImage2Image.from_pretrained(
292294
model_id, trust_remote_code=True
@@ -333,6 +335,7 @@ def load_inpainting_model(
333335
model_id, device="CPU", ov_config=None, use_hf=False, use_genai=False
334336
):
335337
if use_hf:
338+
from diffusers import AutoPipelineForInpainting
336339
logger.info("Using HF Transformers API")
337340
model = AutoPipelineForInpainting.from_pretrained(
338341
model_id, trust_remote_code=True

tools/who_what_benchmark/whowhatbench/utils.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,6 @@
22
import torch
33
from contextlib import contextmanager
44

5-
from diffusers.utils import torch_utils
6-
75

86
def new_randn_tensor(
97
shape: Union[Tuple, List],
@@ -22,11 +20,17 @@ def new_randn_tensor(
2220

2321

2422
def patch_diffusers():
23+
from diffusers.utils import torch_utils
2524
torch_utils.randn_tensor = new_randn_tensor
2625

2726

2827
@contextmanager
2928
def mock_torch_cuda_is_available(to_patch):
29+
try:
30+
# import bnb before patching for avoid attempt to load cuda extension during first import
31+
import bitsandbytes as bnb # noqa: F401
32+
except ImportError:
33+
pass
3034
original_is_available = torch.cuda.is_available
3135
if to_patch:
3236
torch.cuda.is_available = lambda: True

tools/who_what_benchmark/whowhatbench/visualtext_evaluator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import os
44
import datasets
55
import pandas as pd
6-
from diffusers.utils.loading_utils import load_image
6+
from transformers.image_utils import load_image
77
from tqdm import tqdm
88
from transformers import set_seed
99

0 commit comments

Comments
 (0)