Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -61,7 +61,7 @@ with gr.Blocks() as demo:
    seed = gr.Slider(
        label="Seed",
        minimum=0,
-        maximum=
+        maximum=99999999,
        step=1,
        value=0,
    )
@@ -81,10 +81,17 @@ with gr.Blocks() as demo:
        step=32,
        value=2048,
    )
+    gr.Examples(
+        examples=examples,
+        inputs=prompt,
+        outputs=[result, seed],
+        fn=generate,
+        cache_examples=CACHE_EXAMPLES,
+    )

    def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
        if randomize_seed:
-            seed = random.randint(0,
+            seed = random.randint(0, 99999999)
        return seed

    @spaces.GPU(duration=10)
@@ -93,7 +100,6 @@ with gr.Blocks() as demo:
        with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16), timer("inference"):
            return pipe(
                prompt=str,,
-                generator=torch.Generator().manual_seed(int(seed)),
                num_inference_steps=1,
                guidance_scale=0.,
                height=int(height),
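
Taken together, the diff raises the Seed slider's maximum to 99999999, caps randomize_seed_fn at the same value, registers a gr.Examples block, and drops the seeded torch.Generator from the pipe() call. Below is a minimal, self-contained sketch of how the patched seed and examples wiring fits together. It is an illustration only, not the Space's actual app.py: the generate() stub, the placeholder example prompt, the extra UI components, and the CACHE_EXAMPLES default are assumptions, while the 99999999 bound, the randomize_seed_fn body, and the gr.Examples arguments are taken from the diff above.

# Minimal sketch of the seed handling and examples wiring after this commit.
# Assumptions (not from the diff): the generate() stub, the placeholder
# example prompt, the extra UI components, and CACHE_EXAMPLES = False.
import random

import gradio as gr

CACHE_EXAMPLES = False                      # the real Space defines this elsewhere
examples = ["An astronaut riding a horse"]  # placeholder example prompt


def generate(prompt: str, seed: int = 0):
    # Stand-in for the Space's real pipeline call; it only echoes its inputs.
    return f"(stub) would generate {prompt!r} with seed={seed}", seed


with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    result = gr.Textbox(label="Result")
    randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
    seed = gr.Slider(
        label="Seed",
        minimum=0,
        maximum=99999999,  # new upper bound from the commit
        step=1,
        value=0,
    )
    gr.Examples(
        examples=examples,
        inputs=prompt,
        outputs=[result, seed],
        fn=generate,
        cache_examples=CACHE_EXAMPLES,
    )

    def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
        # Patched behaviour: draw a fresh seed in [0, 99999999] when requested.
        if randomize_seed:
            seed = random.randint(0, 99999999)
        return seed

    run = gr.Button("Run")
    # First resolve the seed, then generate with the (possibly randomized) seed.
    run.click(
        fn=randomize_seed_fn, inputs=[seed, randomize_seed], outputs=seed
    ).then(
        fn=generate, inputs=[prompt, seed], outputs=[result, seed]
    )

if __name__ == "__main__":
    demo.launch()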