#!/usr/bin/env python

import gradio as gr

from model import Model
from settings import CACHE_EXAMPLES, MAX_SEED
from utils import randomize_seed_fn


def create_demo(model: Model) -> gr.Blocks:
    examples = [
        'A chair that looks like an avocado',
        'An airplane that looks like a banana',
        'A spaceship',
        'A birthday cupcake',
        'A chair that looks like a tree',
        'A green boot',
        'A penguin',
        'Ube ice cream cone',
        'A bowl of vegetables',
    ]

    def process_example_fn(prompt: str) -> str:
        # Cached examples are generated with the default settings.
        return model.run_text(prompt)

    with gr.Blocks() as demo:
        with gr.Box():
            with gr.Row(elem_id='prompt-container'):
                prompt = gr.Text(
                    label='Prompt',
                    show_label=False,
                    max_lines=1,
                    placeholder='Enter your prompt').style(container=False)
                run_button = gr.Button('Run').style(full_width=False)
            result = gr.Model3D(label='Result', show_label=False)
            with gr.Accordion('Advanced options', open=False):
                seed = gr.Slider(label='Seed',
                                 minimum=0,
                                 maximum=MAX_SEED,
                                 step=1,
                                 value=0)
                randomize_seed = gr.Checkbox(label='Randomize seed',
                                             value=True)
                guidance_scale = gr.Slider(label='Guidance scale',
                                           minimum=1,
                                           maximum=20,
                                           step=0.1,
                                           value=15.0)
                num_inference_steps = gr.Slider(
                    label='Number of inference steps',
                    minimum=1,
                    maximum=100,
                    step=1,
                    value=64)

        gr.Examples(examples=examples,
                    inputs=prompt,
                    outputs=result,
                    fn=process_example_fn,
                    cache_examples=CACHE_EXAMPLES)

        inputs = [
            prompt,
            seed,
            guidance_scale,
            num_inference_steps,
        ]
        # Submitting the prompt or clicking "Run" first (optionally)
        # randomizes the seed, then generates the 3D model.
        prompt.submit(
            fn=randomize_seed_fn,
            inputs=[seed, randomize_seed],
            outputs=seed,
            queue=False,
        ).then(
            fn=model.run_text,
            inputs=inputs,
            outputs=result,
        )
        run_button.click(
            fn=randomize_seed_fn,
            inputs=[seed, randomize_seed],
            outputs=seed,
            queue=False,
        ).then(
            fn=model.run_text,
            inputs=inputs,
            outputs=result,
        )
    return demo
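

# Hypothetical usage sketch (not part of the original file): the Space
# presumably launches this demo from its own entry point, but a minimal
# standalone launch could look like the following, assuming Model() can be
# constructed without arguments.
if __name__ == '__main__':
    demo = create_demo(Model())
    demo.queue().launch()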