Spaces: Running on Zero
Update app.py
app.py CHANGED

@@ -2,7 +2,6 @@ import torch
 from PIL import Image
 import gradio as gr
 import spaces
-from gradio_imageslider import ImageSlider
 from KandiSuperRes import get_SR_pipeline

 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
@@ -23,7 +22,7 @@ def inference(image, size):

     print(f"Image size ({device}): {size} ... OK")

-    return
+    return result


 title = "KandiSuperRes - diffusion model for super resolution"
@@ -37,13 +36,13 @@ gr.Interface(inference,
                       type="value",
                       value='2x',
                       label='Resolution model')],
-
+             gr.Image(type="filepath", label="Output"),
             title=title,
             description=description,
             article=article,
             examples=[['groot.jpeg', "2x"]],
             allow_flagging='never',
             cache_examples=False,
-            delete_cache=(
+            delete_cache=(1800, 3600),
             ).queue(api_open=True).launch(show_error=True, show_api=True)
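For orientation, here is a minimal sketch of how the updated app plausibly fits together after this commit. The gr.Interface wiring, the `return result` fix, the gr.Image(type="filepath") output, and delete_cache=(1800, 3600) come straight from the diff; the input components, the Radio choices, and the way the KandiSuperRes pipeline is invoked inside inference are assumptions that the diff does not show.

import torch
import gradio as gr
from KandiSuperRes import get_SR_pipeline  # import kept from the app; the call signature below is an assumption

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# sr_pipe = get_SR_pipeline(device=device, scale=2)  # assumed signature; see the KandiSuperRes repo

def inference(image, size):
    print(f"Image size ({device}): {size} ... OK")
    # Assumed body: run the super-resolution pipeline and write the upscaled
    # image to disk, because the output component below expects a file path.
    result = "upscaled.png"  # placeholder path standing in for the saved SR output
    return result            # the commit replaces a bare `return` with `return result`

title = "KandiSuperRes - diffusion model for super resolution"
description = "..."  # assumed; value not shown in the diff
article = "..."      # assumed; value not shown in the diff

gr.Interface(inference,
             [gr.Image(type="pil", label="Input image"),   # assumed input component
              gr.Radio(choices=["2x", "4x"],               # choices assumed
                       type="value",
                       value="2x",
                       label="Resolution model")],
             gr.Image(type="filepath", label="Output"),    # output component added in this commit
             title=title,
             description=description,
             article=article,
             examples=[["groot.jpeg", "2x"]],
             allow_flagging="never",
             cache_examples=False,
             delete_cache=(1800, 3600),  # every 1800 s, delete temp files older than 3600 s
             ).queue(api_open=True).launch(show_error=True, show_api=True)

Two practical consequences of the change: because the output is gr.Image(type="filepath"), inference must return a path to an image file rather than nothing, which is what the return result fix addresses; and delete_cache=(1800, 3600) periodically purges the temporary files Gradio writes for those outputs, which matters on a Space whose disk should not accumulate generated images.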