amazonaws-sp committed
Commit f497200
Parent: d9c1ade

Update app.py

Files changed (1)
1. app.py  +11 -2
app.py CHANGED
@@ -52,7 +52,8 @@ def generate(
     vaecall = 'madebyollin/sdxl-vae-fp16-fix',
     lora = '',
     lora_scale: float = 0.7,
-) -> PIL.Image.Image:
+    urls = "",
+):
     if torch.cuda.is_available():
 
         if not use_vae:
@@ -120,7 +121,7 @@ with gr.Blocks(theme=gr.themes.Soft(), css="style.css") as demo:
                 step=0.01,
                 value=0.7,
             )
-        upload = gr.Image(label='Image2Image')
+        url = gr.Image(label='Img2Img')
         with gr.Row():
             prompt = gr.Text(
                 placeholder="Input prompt",
@@ -243,6 +244,13 @@ with gr.Blocks(theme=gr.themes.Soft(), css="style.css") as demo:
         queue=False,
         api_name=False,
     )
+    url.change(
+        fn=lambda x: gr.update(visible=x),
+        inputs=url,
+        outputs=urls,
+        queue=False,
+        api_name=False,
+    )
 
     gr.on(
         triggers=[
@@ -278,6 +286,7 @@ with gr.Blocks(theme=gr.themes.Soft(), css="style.css") as demo:
             vaecall,
             lora,
             lora_scale,
+            urls,
         ],
         outputs=result,
         api_name="run",