hysts committed • Commit a962fcd
1 Parent(s): 285c0b5

Update

Files changed:
- app_caption.py  +31 -29
- app_vqa.py  +37 -35
app_caption.py  CHANGED

@@ -9,37 +9,39 @@ import gradio as gr
 from prismer_model import Model


-def create_demo():
+def create_demo() -> gr.Blocks:
     model = Model()
     model.mode = 'caption'
-    with gr.
-        with gr.
+    with gr.Blocks() as demo:
+        with gr.Row():
+            with gr.Column():
+                image = gr.Image(label='Input', type='filepath')
+                model_name = gr.Dropdown(label='Model', choices=['Prismer-Base', 'Prismer-Large'], value='Prismer-Base')
+                run_button = gr.Button('Run')
+            with gr.Column(scale=1.5):
+                caption = gr.Text(label='Model Prediction')
+                with gr.Row():
+                    depth = gr.Image(label='Depth')
+                    edge = gr.Image(label='Edge')
+                    normals = gr.Image(label='Normals')
+                with gr.Row():
+                    segmentation = gr.Image(label='Segmentation')
+                    object_detection = gr.Image(label='Object Detection')
+                    ocr = gr.Image(label='OCR Detection')
+
+        inputs = [image, model_name]
+        outputs = [caption, depth, edge, normals, segmentation, object_detection, ocr]
+
+        paths = sorted(pathlib.Path('prismer/images').glob('*'))
+        examples = [[path.as_posix(), 'Prismer-Base'] for path in paths]
+        gr.Examples(examples=examples,
+                    inputs=inputs,
+                    outputs=outputs,
+                    fn=model.run_caption,
+                    cache_examples=os.getenv('SYSTEM') == 'spaces')
+
+        run_button.click(fn=model.run_caption, inputs=inputs, outputs=outputs)
+    return demo


 if __name__ == '__main__':
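The diff cuts off at the `if __name__ == '__main__':` header, so the launch code itself is not shown. A minimal sketch of how a `create_demo()` that returns a `gr.Blocks` is typically run standalone (the body below is an assumption, not the file's actual content):

    if __name__ == '__main__':
        demo = create_demo()   # build the captioning UI defined above
        demo.queue().launch()  # assumed launch call; the Space's real options may differ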
app_vqa.py  CHANGED

@@ -9,42 +9,44 @@ import gradio as gr
 from prismer_model import Model


-def create_demo():
+def create_demo() -> gr.Blocks:
     model = Model()
-    with gr.
-        with gr.
+    with gr.Blocks() as demo:
+        with gr.Row():
+            with gr.Column():
+                image = gr.Image(label='Input', type='filepath')
+                model_name = gr.Dropdown(label='Model', choices=['Prismer-Base', 'Prismer-Large'], value='Prismer-Base')
+                question = gr.Text(label='Question')
+                run_button = gr.Button('Run')
+            with gr.Column(scale=1.5):
+                answer = gr.Text(label='Model Prediction')
+                with gr.Row():
+                    depth = gr.Image(label='Depth')
+                    edge = gr.Image(label='Edge')
+                    normals = gr.Image(label='Normals')
+                with gr.Row():
+                    segmentation = gr.Image(label='Segmentation')
+                    object_detection = gr.Image(label='Object Detection')
+                    ocr = gr.Image(label='OCR Detection')
+
+        inputs = [image, model_name, question]
+        outputs = [answer, depth, edge, normals, segmentation, object_detection, ocr]
+
+        paths = sorted(pathlib.Path('prismer/images').glob('*'))
+        ex_questions = ['What is the man on the left doing?',
+                        'What is this person doing?',
+                        'How many cows in this image?',
+                        'What is the type of animal in this image?',
+                        'What toy is it?']
+        examples = [[path.as_posix(), 'Prismer-Base', ex_questions[i]] for i, path in enumerate(paths)]
+        gr.Examples(examples=examples,
+                    inputs=inputs,
+                    outputs=outputs,
+                    fn=model.run_vqa,
+                    cache_examples=os.getenv('SYSTEM') == 'spaces')
+
+        run_button.click(fn=model.run_vqa, inputs=inputs, outputs=outputs)
+    return demo


 if __name__ == '__main__':
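Both files now expose the same `create_demo() -> gr.Blocks` signature, so a top-level app can mount them side by side. A hedged sketch of that wiring (module names, tab labels, and layout here are assumptions, not part of this commit):

    import gradio as gr

    from app_caption import create_demo as create_caption_demo
    from app_vqa import create_demo as create_vqa_demo

    # Nest each file's Blocks inside a tab of a parent Blocks app (assumed layout).
    with gr.Blocks() as demo:
        with gr.Tab('Caption'):
            create_caption_demo()
        with gr.Tab('Visual Question Answering'):
            create_vqa_demo()

    if __name__ == '__main__':
        demo.queue().launch()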