Spaces:
Running
on
Zero
Running
on
Zero
nhatipoglu
committed on
Commit
•
7bae4a0
1
Parent(s):
8462782
Update app.py
Browse files
app.py
CHANGED
@@ -12,7 +12,7 @@ import re
|
|
12 |
models = {
|
13 |
"Qwen/Qwen2-VL-7B-Instruct": Qwen2VLForConditionalGeneration.from_pretrained("Qwen/Qwen2-VL-7B-Instruct", torch_dtype="auto", device_map="auto"),
|
14 |
"Qwen/Qwen2-VL-2B-Instruct": Qwen2VLForConditionalGeneration.from_pretrained("Qwen/Qwen2-VL-2B-Instruct", torch_dtype="auto", device_map="auto"),
|
15 |
-
|
16 |
"openai/clip-vit-base-patch32": CLIPModel.from_pretrained("openai/clip-vit-base-patch32"),
|
17 |
"Salesforce/blip-image-captioning-base": BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-base")
|
18 |
}
|
|
|
12 |
models = {
|
13 |
"Qwen/Qwen2-VL-7B-Instruct": Qwen2VLForConditionalGeneration.from_pretrained("Qwen/Qwen2-VL-7B-Instruct", torch_dtype="auto", device_map="auto"),
|
14 |
"Qwen/Qwen2-VL-2B-Instruct": Qwen2VLForConditionalGeneration.from_pretrained("Qwen/Qwen2-VL-2B-Instruct", torch_dtype="auto", device_map="auto"),
|
15 |
+
|
16 |
"openai/clip-vit-base-patch32": CLIPModel.from_pretrained("openai/clip-vit-base-patch32"),
|
17 |
"Salesforce/blip-image-captioning-base": BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-base")
|
18 |
}
|