Kartheekb7 committed · e0bdd7f
Parent(s): aa3f38a
Update app.py
app.py CHANGED
@@ -5,12 +5,8 @@ from PIL import Image
 from datasets import load_dataset
 import random
 from datasets import load_from_disk
-
-dataset = load_from_disk("./train")
-
+dataset = load_from_disk("./fruits_dataset/train/train")
 from collections import OrderedDict
-
-
 FRUITS30_CLASSES = OrderedDict(
     {
         "0" : "acerolas",
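For context on the path change: the nested train/train matches how the datasets library lays saved data out on disk, since save_to_disk on a DatasetDict writes one subdirectory per split. A minimal sketch of how such a layout could arise (the imagefolder source below is a hypothetical stand-in, not taken from this repo):

from datasets import load_dataset, load_from_disk

# save_to_disk on a DatasetDict writes one folder per split, so a
# DatasetDict saved to ./fruits_dataset/train stores its "train"
# split under ./fruits_dataset/train/train.
ds = load_dataset("imagefolder", data_dir="./fruits")   # hypothetical source
ds.save_to_disk("./fruits_dataset/train")
train = load_from_disk("./fruits_dataset/train/train")  # a single Dataset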
@@ -45,9 +41,7 @@ FRUITS30_CLASSES = OrderedDict(
         "29" : "watermelons"
     }
 )
-
 labels = list(FRUITS30_CLASSES.values())
-
 model, _, preprocess = open_clip.create_model_and_transforms('ViT-B-32', pretrained='laion2b_s34b_b79k')
 model.eval()  # model in train mode by default, impacts some models with BatchNorm or stochastic depth active
 tokenizer = open_clip.get_tokenizer('ViT-B-32')
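The model, preprocess transform, and tokenizer set up above are the standard open_clip zero-shot pieces. A minimal sketch of how an image could be scored against the FRUITS30 labels with them; this is an illustration only, since the app's actual on_submit logic is outside this diff:

import torch

def classify(img):  # img: a PIL image, e.g. one sampled from the dataset
    image = preprocess(img).unsqueeze(0)
    text = tokenizer([f"a photo of {name}" for name in labels])
    with torch.no_grad():
        image_features = model.encode_image(image)
        text_features = model.encode_text(text)
        # Cosine similarity via normalized dot products, softmaxed to probabilities
        image_features /= image_features.norm(dim=-1, keepdim=True)
        text_features /= text_features.norm(dim=-1, keepdim=True)
        probs = (100.0 * image_features @ text_features.T).softmax(dim=-1)
    return labels[probs.argmax().item()]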
@@ -55,7 +49,6 @@ tokenizer = open_clip.get_tokenizer('ViT-B-32')
 def create_interface():
     # Store current correct labels in a mutable container
     current_correct_labels = []
-
     def get_image():
         indices = random.sample(range(len(dataset)), 1)
         selected_images = [dataset[i]['image'] for i in indices]
@@ -78,7 +71,7 @@ def create_interface():
     with gr.Blocks() as demo:
         # Create components
         with gr.Row():
-            img1 = gr.Image(type="pil", label="Fruit")
+            img1 = gr.Image(type="pil", label="Fruit",height = 256,width = 256)
         with gr.Row():
             label1 = gr.Textbox(label="Name this fruit")
 
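A note on the changed line, assuming recent Gradio semantics: height and width constrain the rendered preview of the component, and with type="pil" the event handler still receives the image at its original resolution, so the change only affects the on-screen size. Sketched in isolation:

import gradio as gr

# height/width size the on-screen preview only; with type="pil" the
# callback still receives the full-resolution PIL image.
with gr.Blocks() as demo:
    img1 = gr.Image(type="pil", label="Fruit", height=256, width=256)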
@@ -92,8 +85,6 @@ def create_interface():
             fn=get_image,
             outputs=[img1]
         )
-
-
         # Evaluate user input on submit button click
         submit_btn.click(
             fn=on_submit,
@@ -101,7 +92,7 @@ def create_interface():
             outputs=result
         )
 
-    demo.launch()
+    demo.launch(debug = True)
 
 # Run the game
 create_interface()
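On the last change: in Gradio, launch(debug=True) blocks the calling thread and prints errors to the console, so a plausible motivation here is making tracebacks visible in the Space's runtime logs while testing.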