taesiri committed on
Commit b9c781e
1 Parent(s): ce86d1c

Update app.py

Files changed (1)
  1. app.py +2 -5
app.py CHANGED
@@ -3,15 +3,12 @@ import gradio as gr
 from transformers import CLIPProcessor, CLIPModel
 import spaces
 
-
-# Check if CUDA is available and set the device accordingly
 model = CLIPModel.from_pretrained("openai/clip-vit-base-patch16").to("cuda")
 processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch16")
 
 
 @spaces.GPU(duration=120)
 def calculate_score(image, text):
-    print(device)
     labels = text.split(";")
     labels = [l.strip() for l in labels]
     labels = list(filter(None, labels))
@@ -19,8 +16,8 @@ def calculate_score(image, text):
         return dict()
     inputs = processor(text=labels, images=image, return_tensors="pt", padding=True)
     inputs = {
-        k: v.to(device) for k, v in inputs.items()
-    }  # Move tensors to the appropriate device
+        k: v.to("cuda") for k, v in inputs.items()
+    }
     outputs = model(**inputs)
     logits_per_image = (
         outputs.logits_per_image.detach().cpu().numpy()
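
The diff cuts off inside calculate_score, so the return value and the Gradio wiring are not visible in this commit. For context, here is a minimal sketch of how the updated file plausibly continues, assuming the usual CLIP zero-shot pattern (a softmax over logits_per_image giving one score per label) and a plain gr.Interface; everything past the numpy() call, including the softmax, the returned dict, and the interface definition, is an assumption rather than code taken from this repository.

import gradio as gr
import numpy as np
import spaces
from transformers import CLIPProcessor, CLIPModel

model = CLIPModel.from_pretrained("openai/clip-vit-base-patch16").to("cuda")
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch16")


@spaces.GPU(duration=120)
def calculate_score(image, text):
    # Labels arrive as a single semicolon-separated string.
    labels = text.split(";")
    labels = [l.strip() for l in labels]
    labels = list(filter(None, labels))
    if not labels:
        return dict()
    inputs = processor(text=labels, images=image, return_tensors="pt", padding=True)
    inputs = {k: v.to("cuda") for k, v in inputs.items()}
    outputs = model(**inputs)
    logits_per_image = outputs.logits_per_image.detach().cpu().numpy()[0]
    # Assumption: normalise the raw logits into per-label probabilities with a softmax.
    exp = np.exp(logits_per_image - logits_per_image.max())
    probs = exp / exp.sum()
    return {label: float(p) for label, p in zip(labels, probs)}


# Assumption: the Space exposes the function through a simple gr.Interface.
demo = gr.Interface(
    fn=calculate_score,
    inputs=[gr.Image(type="pil"), gr.Textbox(label="Labels (separated by ;)")],
    outputs=gr.Label(label="Scores"),
)

if __name__ == "__main__":
    demo.launch()

On ZeroGPU Spaces the @spaces.GPU decorator attaches the GPU only for the duration of the decorated call, which is why this commit can simply move the input tensors to "cuda" inside the function instead of keeping a module-level device variable.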