Zeph27 committed on
Commit be8d9d0
1 Parent(s): 287c61a
Files changed (3)
  1. .gitignore +1 -0
  2. app.py +59 -3
  3. requirements.txt +8 -0
.gitignore ADDED
@@ -0,0 +1 @@
+ Aura/
app.py CHANGED
@@ -1,7 +1,63 @@
  import gradio as gr

- def greet(name):
-     return "Hello " + name + "!!"

- demo = gr.Interface(fn=greet, inputs="text", outputs="text")
  demo.launch()

+ import spaces
  import gradio as gr
+ from gradio_imageslider import ImageSlider
+ from PIL import Image
+ import numpy as np
+ from aura_sr import AuraSR
+ import torch
+ import time
+ import spaces

+ # Force CPU usage
+ torch.set_default_tensor_type(torch.FloatTensor)
+
+ # Override torch.load to always use CPU
+ original_load = torch.load
+ torch.load = lambda *args, **kwargs: original_load(*args, **kwargs, map_location=torch.device('cpu'))
+
+ # Initialize the AuraSR model
+ aura_sr = AuraSR.from_pretrained("fal/AuraSR-v2")
+
+ # Restore original torch.load
+ torch.load = original_load
+
+ def process_image(input_image, scale_factor):
+     if input_image is None:
+         raise gr.Error("Please provide an image to upscale.")
+
+     start_time = time.time()
+
+     # Convert to PIL Image for resizing
+     pil_image = Image.fromarray(input_image)
+
+     if scale_factor == 2:
+         pil_image = pil_image.resize((int(pil_image.width * 0.5), int(pil_image.height * 0.5)), Image.LANCZOS)
+     elif scale_factor == 3:
+         pil_image = pil_image.resize((int(pil_image.width * 0.75), int(pil_image.height * 0.75)), Image.LANCZOS)
+
+     # Upscale the image using AuraSR
+     upscaled_image = process_image_on_gpu(pil_image)
+
+     # Convert result to numpy array if it's not already
+     result_array = np.array(upscaled_image)
+
+     end_time = time.time()
+     processing_time = end_time - start_time
+
+     return [input_image, result_array], f"Processing time: {processing_time:.2f} seconds"
+
+ @spaces.GPU
+ def process_image_on_gpu(pil_image):
+     return aura_sr.upscale_4x(pil_image)
+
+ with gr.Blocks() as demo:
+     gr.Markdown("# Image Upscaler")
+     with gr.Row():
+         input_image = gr.Image(label="Input Image", type="numpy")
+         scale_factor = gr.Radio([2, 3, 4], label="Scale Factor", value=4)
+     image_slider = ImageSlider(label="Before/After")
+     upscale_button = gr.Button("Upscale")
+     processing_time_text = gr.Textbox(label="Processing Time")
+     upscale_button.click(fn=process_image, inputs=[input_image, scale_factor], outputs=[image_slider, processing_time_text])

  demo.launch()
requirements.txt ADDED
@@ -0,0 +1,8 @@
+ aura-sr==0.0.4
+ gradio==4.41.0
+ spaces==0.29.3
+ --extra-index-url https://download.pytorch.org/whl/cu121
+ torch==2.3.1+cu121
+ torchaudio
+ torchvision
+ gradio_imageslider==0.0.20