Spaces: Running on Zero
import gradio as gr
from gradio_imageslider import ImageSlider
from PIL import Image
import numpy as np
from aura_sr import AuraSR
import torch
import spaces
# Force CPU usage
torch.set_default_tensor_type(torch.FloatTensor)
# Override torch.load to always use CPU
original_load = torch.load
torch.load = lambda *args, **kwargs: original_load(*args, **kwargs, map_location=torch.device('cpu'))
# Initialize the AuraSR model
aura_sr = AuraSR.from_pretrained("fal-ai/AuraSR-v2")
# Restore original torch.load
torch.load = original_load
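# Note: on ZeroGPU Spaces no GPU is attached at import time, so the temporary
# torch.load override above forces the checkpoint to be deserialized onto the CPU
# while the model object is created.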
def process_image(input_image):
    if input_image is None:
        raise gr.Error("Please provide an image to upscale.")

    # Convert the numpy array from Gradio into a PIL Image for the model
    pil_image = Image.fromarray(input_image)

    # Upscale the image using AuraSR (runs inside the GPU-decorated helper below)
    upscaled_image = process_image_on_gpu(pil_image)

    # Convert the result back to a numpy array for the slider output
    result_array = np.array(upscaled_image)

    return [input_image, result_array]
@spaces.GPU
def process_image_on_gpu(pil_image):
    return aura_sr.upscale_4x_overlapped(pil_image)
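# Note: the @spaces.GPU decorator requests a ZeroGPU device only for the duration
# of this call; everything else in the app, including model loading, runs on CPU.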
title = """<h1 align="center">AuraSR-v2 - An open reproduction of the GigaGAN Upscaler from fal.ai</h1>
<p><center>
<a href="https://huggingface.co/fal/AuraSR-v2" target="_blank">[AuraSR-v2]</a>
<a href="https://blog.fal.ai/introducing-aurasr-an-open-reproduction-of-the-gigagan-upscaler-2/" target="_blank">[Blog Post]</a>
<a href="https://huggingface.co/fal-ai/AuraSR" target="_blank">[v1 Model Page]</a>
</center></p>
"""
with gr.Blocks() as demo:
    gr.HTML(title)
    with gr.Row():
        with gr.Column(scale=1):
            input_image = gr.Image(label="Input Image", type="numpy")
            process_btn = gr.Button("Upscale Image")
        with gr.Column(scale=1):
            output_slider = ImageSlider(label="Before / After", type="numpy")

    process_btn.click(
        fn=process_image,
        inputs=[input_image],
        outputs=output_slider
    )

    # Add examples
    gr.Examples(
        examples=[
            "image1.png",
            "image3.png"
        ],
        inputs=input_image,
        outputs=output_slider,
        fn=process_image,
        cache_examples=True
    )

demo.launch(debug=True)
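
For reference, a minimal standalone sketch of the same upscaling step without the Gradio UI, using the two AuraSR calls that appear in the app above; the file names "input.png" and "output_4x.png" are illustrative placeholders:

from PIL import Image
from aura_sr import AuraSR

# Load the v2 weights from the Hub (same call as in the app)
aura_sr = AuraSR.from_pretrained("fal-ai/AuraSR-v2")

# Open any local RGB image
image = Image.open("input.png").convert("RGB")

# 4x upscale with the overlapped-tile method used by the demo
upscaled = aura_sr.upscale_4x_overlapped(image)
upscaled.save("output_4x.png")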