Update app.py (#1916)

- Update app.py (a0b409fb20912377082b7b6c9b02ccd0c81ace2f)
- Update app.py (ad2fb65d6b108291962280d4ce2ddbe997df13fe)

Co-authored-by: Pete <[email protected]>

app.py CHANGED
@@ -111,11 +111,10 @@ def infer(
 
     for prediction in json_data["predictions"]:
         for image in prediction["images"]:
-
-            images.append(
+            pil_image = Image.open(BytesIO(base64.b64decode(image)))
+            images.append(pil_image)
 
             if profile is not None:  # avoid conversion on non-logged-in users
-                pil_image = Image.open(BytesIO(base64.b64decode(image)))
                 user_history.save_image(  # save images + metadata to user history
                     label=prompt,
                     image=pil_image,
@@ -364,13 +363,11 @@ with block:
         inputs=[text, negative, guidance_scale],
         outputs=[gallery, community_group],
         cache_examples=True,
-        postprocess=False,
     )
     negative.submit(
         infer,
         inputs=[text, negative, guidance_scale, style_selection],
         outputs=[gallery, community_group],
-        postprocess=False,
         concurrency_id="infer",
         concurrency_limit=8,
     )
@@ -378,7 +375,6 @@ with block:
         infer,
         inputs=[text, negative, guidance_scale, style_selection],
         outputs=[gallery, community_group],
-        postprocess=False,
         concurrency_id="infer",
         concurrency_limit=8,
     )
@@ -386,7 +382,6 @@ with block:
         infer,
         inputs=[text, negative, guidance_scale, style_selection],
         outputs=[gallery, community_group],
-        postprocess=False,
         concurrency_id="infer",
         concurrency_limit=8,
     )
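The remaining hunks drop postprocess=False from the submit listeners; with infer now returning PIL images, the gallery output presumably goes through Gradio's normal postprocessing again. Below is a minimal sketch of comparable listener wiring under Gradio 4.x, with a stub infer and placeholder components; style_selection and community_group are omitted for brevity, and nothing here is the Space's actual UI code.

import gradio as gr


def infer(prompt: str, negative: str, guidance_scale: float):
    # Stub standing in for the Space's real backend call; a Gallery output
    # accepts a list of images (empty here).
    return []


with gr.Blocks() as block:
    text = gr.Textbox(label="Prompt")
    negative = gr.Textbox(label="Negative prompt")
    guidance_scale = gr.Slider(0, 20, value=9, label="Guidance scale")
    gallery = gr.Gallery(label="Generated images")

    # Listeners sharing a concurrency_id are grouped in the queue, and
    # concurrency_limit caps how many of them run at the same time.
    negative.submit(
        infer,
        inputs=[text, negative, guidance_scale],
        outputs=[gallery],
        concurrency_id="infer",
        concurrency_limit=8,
    )

if __name__ == "__main__":
    block.queue().launch()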
|