salomonsky committed
Commit
bc9a69a
1 Parent(s): 72a2518

Update app.py

Files changed (1)
  1. app.py +11 -28
app.py CHANGED
@@ -1,5 +1,4 @@
  import os
- import torch
  import gradio as gr
  import numpy as np
  import random
@@ -17,18 +16,14 @@ MAX_SEED = np.iinfo(np.int32).max
  HF_TOKEN = os.environ.get("HF_TOKEN")
  HF_TOKEN_UPSCALER = os.environ.get("HF_TOKEN_UPSCALER")

- if not os.path.exists('GFPGANv1.4.pth'):
-     os.system("wget https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.4.pth -P .")
-
- device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
- model_path = 'GFPGANv1.4.pth'
- gfpgan = GFPGANer(model_path=model_path, upscale_factor=4, arch='clean', channel_multiplier=2, model_name='GPFGAN', device=device)
+ def enable_lora(lora_add, basemodel):
+     return basemodel if not lora_add else lora_add

  async def generate_image(prompt, model, lora_word, width, height, scales, steps, seed):
      try:
          if seed == -1:
              seed = random.randint(0, MAX_SEED)
-             seed = int(seed)
+         seed = int(seed)
          text = str(Translator().translate(prompt, 'English')) + "," + lora_word
          client = AsyncInferenceClient()
          image = await client.text_to_image(prompt=text, height=height, width=width, guidance_scale=scales, num_inference_steps=steps, model=model)
@@ -37,14 +32,6 @@ async def generate_image(prompt, model, lora_word, width, height, scales, steps,
          print(f"Error generating image: {e}")
          return None, None

- def get_upscale_gfpgan(prompt, img_path):
-     try:
-         img = gfpgan.enhance(img_path)
-         return img
-     except Exception as e:
-         print(f"Error upscale image: {e}")
-         return None
-
  def get_upscale_finegrain(prompt, img_path, upscale_factor):
      try:
          client = Client("finegrain/finegrain-image-enhancer", hf_token=HF_TOKEN_UPSCALER)
@@ -54,18 +41,17 @@ def get_upscale_finegrain(prompt, img_path, upscale_factor):
          print(f"Error upscale image: {e}")
          return None

- async def gen(prompt, basemodel, width, height, scales, steps, seed, upscale_factor, process_upscale, lora_model, process_lora, upscale_model):
+ async def gen(prompt, basemodel, width, height, scales, steps, seed, upscale_factor, process_upscale, lora_model, process_lora):
      model = enable_lora(lora_model, basemodel) if process_lora else basemodel
      image, seed = await generate_image(prompt, model, "", width, height, scales, steps, seed)
      if image is None:
          return [None, None]
+
      image_path = "temp_image.jpg"
      image.save(image_path, format="JPEG")
+
      if process_upscale:
-         if upscale_model == "GPFGAN":
-             upscale_image = get_upscale_gfpgan(prompt, image_path)
-         elif upscale_model == "Finegrain":
-             upscale_image = get_upscale_finegrain(prompt, image_path, upscale_factor)
+         upscale_image = get_upscale_finegrain(prompt, image_path, upscale_factor)
          upscale_image_path = "upscale_image.jpg"
          upscale_image.save(upscale_image_path, format="JPEG")
          return [image_path, upscale_image_path]
@@ -88,20 +74,17 @@ with gr.Blocks(css=css, theme="Nymbo/Nymbo_Theme") as demo:
      process_lora = gr.Checkbox(label="Procesar LORA")
      process_upscale = gr.Checkbox(label="Procesar Escalador")
      upscale_factor = gr.Radio(label="Factor de Escala", choices=[2, 4, 8], value=2)
-     upscale_model = gr.Radio(label="Modelo de Escalado", choices=["GPFGAN", "Finegrain"], value="GPFGAN")
-
+
      with gr.Accordion(label="Opciones Avanzadas", open=False):
-         width = gr.Slider(label="Ancho", minimum=512, maximum=1280, step=8, value=512)
-         height = gr.Slider(label="Alto", minimum=512, maximum=1280, step=8, value=512)
+         width = gr.Slider(label="Ancho", minimum=512, maximum=1280, step=8, value=1280)
+         height = gr.Slider(label="Alto", minimum=512, maximum=1280, step=8, value=768)
          scales = gr.Slider(label="Escalado", minimum=1, maximum=20, step=1, value=10)
          steps = gr.Slider(label="Pasos", minimum=1, maximum=100, step=1, value=20)
          seed = gr.Number(label="Semilla", value=-1)

      btn = gr.Button("Generar")
      btn.click(
-         fn=gen,
-         inputs=[prompt, basemodel_choice, width, height, scales, steps, seed, upscale_factor, process_upscale, lora_model_choice, process_lora, upscale_model,],
-         outputs=output_res,
+         fn=gen, inputs=[prompt, basemodel_choice, width, height, scales, steps, seed, upscale_factor, process_upscale, lora_model_choice, process_lora, upscale_model,], outputs=output_res,
      )

  demo.launch()
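Note (editorial, not part of the commit): with the GFPGAN branch removed, gen now calls get_upscale_finegrain unconditionally whenever "Procesar Escalador" is checked, but that helper returns None on failure and gen immediately calls upscale_image.save(...). A minimal defensive sketch, reusing only the names and return convention visible in this diff; the final fallback return is an assumption, since that part of gen lies outside the hunk:

    if process_upscale:
        upscale_image = get_upscale_finegrain(prompt, image_path, upscale_factor)
        if upscale_image is None:
            # Finegrain call failed; fall back to the un-upscaled render
            return [image_path, image_path]
        upscale_image_path = "upscale_image.jpg"
        upscale_image.save(upscale_image_path, format="JPEG")
        return [image_path, upscale_image_path]
    # assumption: return the base image twice when upscaling is disabled
    return [image_path, image_path]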
 
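Note (editorial, not part of the commit): the new btn.click call still lists upscale_model in inputs, yet this commit removes the upscale_model radio and drops that parameter from gen, so building the interface would raise NameError: name 'upscale_model' is not defined. A sketch of the wiring the new 11-parameter gen appears to expect, reusing the component names shown above:

    btn = gr.Button("Generar")
    btn.click(
        fn=gen,
        # one input per gen parameter, in the same order as the new signature
        inputs=[prompt, basemodel_choice, width, height, scales, steps, seed,
                upscale_factor, process_upscale, lora_model_choice, process_lora],
        outputs=output_res,
    )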