ImportError: cannot import name 'SiglipVisionModel' from 'transformers'

#11 · opened by Tahmimhossain

Why is this error happening? Do I need to install anything else to run the model locally?


ImportError                               Traceback (most recent call last)
Cell In[21], line 4
      1 from transformers import AutoModel, AutoProcessor
      2 import torch
----> 4 model = AutoModel.from_pretrained("visheratin/MC-LLaVA-3b", torch_dtype=torch.float16, trust_remote_code=True).to("cuda")
      6 processor = AutoProcessor.from_pretrained("visheratin/MC-LLaVA-3b", trust_remote_code=True)
      8 with torch.inference_mode():

File ~/.conda/envs/Multimodal_RAG_test/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:526, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
    523 if kwargs.get("quantization_config", None) is not None:
    524     _ = kwargs.pop("quantization_config")
--> 526 config, kwargs = AutoConfig.from_pretrained(
    527     pretrained_model_name_or_path,
    528     return_unused_kwargs=True,
    529     trust_remote_code=trust_remote_code,
    530     code_revision=code_revision,
    531     _commit_hash=commit_hash,
    532     **hub_kwargs,
    533     **kwargs,
    534 )
    536 # if torch_dtype=auto was passed here, ensure to pass it on
    537 if kwargs_orig.get("torch_dtype", None) == "auto":

File ~/.conda/envs/Multimodal_RAG_test/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py:1091, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
...
---> 11 from transformers import PreTrainedModel, SiglipVisionModel
     12 from transformers.activations import ACT2FN
     13 from transformers.cache_utils import Cache, DynamicCache

ImportError: cannot import name 'SiglipVisionModel' from 'transformers'
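For what it's worth, the failing import can be reproduced outside the model's remote code, so my assumption is that the installed transformers version simply predates SigLIP support rather than this being specific to MC-LLaVA-3b. A minimal check of the local environment might look like this (a sketch, not code from the model repo):

import transformers

# Print the installed release so it can be compared against one that ships SigLIP.
print("transformers version:", transformers.__version__)

try:
    # This is the same import the model's remote code performs.
    from transformers import SiglipVisionModel
    print("SiglipVisionModel is available")
except ImportError as err:
    print("SiglipVisionModel is missing:", err)
    # Assumed fix: upgrade the package, e.g. `pip install -U transformers`,
    # then restart the kernel before loading the model again.

If the check fails the same way, upgrading transformers and restarting the notebook kernel would be my first thing to try.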
