Skip to content

Commit 37b8855

Browse files
pbielak (Piotr Bielak)
and authored
Disable image processor loading for THUDM/glm-4v-9b (#2315)
Co-authored-by: Piotr Bielak <pbielak@habana.ai>
1 parent 2fa9dee commit 37b8855

1 file changed

Lines changed: 1 addition & 0 deletions

File tree

optimum/habana/transformers/modeling_utils.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -893,6 +893,7 @@ def adapt_transformers_to_gaudi():
893893
transformers.AutoModelForSeq2SeqLM.register(GLM4VConfig, GLM4VForConditionalGeneration)
894894
transformers.AutoModelForVision2Seq.register(GLM4VConfig, GLM4VForConditionalGeneration)
895895
transformers.AutoModelForSequenceClassification.register(GLM4VConfig, GLM4VForSequenceClassification)
896+
transformers.pipelines.image_to_text.ImageToTextPipeline._load_image_processor = False
896897
else:
897898
# Register chatglm with optimization on Gaudi
898899
transformers.AutoConfig.register("chatglm", ChatGLMConfig)

0 commit comments

Comments (0)