ysmao committed on
Commit 6013cb0
1 Parent(s): 7d60aec

add examples

Files changed (3)
  1. app.py +38 -11
  2. examples/layout_input.jpg +0 -0
  3. examples/layout_output.jpg +0 -0
app.py CHANGED
@@ -1,4 +1,5 @@
 import torch
+
 torch.jit.script = lambda f: f
 
 import spaces
@@ -29,12 +30,19 @@ pipe = StableDiffusionControlNetPipeline.from_pretrained(
 pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)
 
 apply_depth = DepthDetector()
-apply_normal = NormalDetector(
-    hf_hub_download("camenduru/DSINE", filename="dsine.pt")
-)
+apply_normal = NormalDetector(hf_hub_download("camenduru/DSINE", filename="dsine.pt"))
 apply_segm = SegmDetector()
 
 
+layout_examples = [
+    [
+        "examples/layout_input.jpg",
+        "A modern bedroom",
+        "examples/layout_output.jpg",
+    ],
+]
+
+
 @spaces.GPU(duration=20)
 def generate(
     input_image,
@@ -89,13 +97,18 @@ with block:
     with gr.Row():
         gr.Markdown("## KuJiaLe Layout ControlNet Demo")
     with gr.Row():
-        input_image = gr.Image(type="numpy", label="input_image")
-    with gr.Row():
-        prompt = gr.Textbox(label="Prompt")
-    with gr.Row():
-        run_button = gr.Button(value="Run")
+        gr.Markdown(
+            "### Checkout our released model at [kujiale-ai/controlnet-layout](https://huggingface.co/kujiale-ai/controlnet-layout)"
+        )
     with gr.Row():
         with gr.Column():
+            with gr.Row():
+                input_image = gr.Image(
+                    sources="upload", type="numpy", label="Input Image", height=512
+                )
+
+            prompt = gr.Textbox(label="Prompt")
+            run_button = gr.Button(value="Run")
             with gr.Accordion("Advanced options", open=False):
                 num_samples = gr.Slider(
                     label="Images", minimum=1, maximum=2, value=1, step=1
@@ -135,9 +148,23 @@ with block:
                     value="longbody, lowres, bad anatomy, human, extra digit, fewer digits, cropped, worst quality, low quality",
                 )
 
+            with gr.Column():
+                image_gallery = gr.Gallery(
+                    label="Output",
+                    show_label=False,
+                    elem_id="gallery",
+                    height=512,
+                    object_fit="contain",
+                )
         with gr.Row():
-            image_gallery = gr.Gallery(
-                label="Output", show_label=False, elem_id="gallery"
+            dummy_image_for_outputs = gr.Image(visible=False, label="Result")
+            gr.Examples(
+                fn=lambda *args: [[args[-1]], args[-2]],
+                examples=layout_examples,
+                inputs=[input_image, prompt, dummy_image_for_outputs],
+                outputs=[image_gallery, prompt],
+                run_on_click=True,
+                examples_per_page=1024,
             )
 
     ips = [
@@ -154,4 +181,4 @@ with block:
     ]
     run_button.click(fn=generate, inputs=ips, outputs=[image_gallery])
 
-block.launch(server_name='0.0.0.0')
+block.launch(server_name="0.0.0.0")
examples/layout_input.jpg ADDED
examples/layout_output.jpg ADDED
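
A note on the gr.Examples wiring added in app.py: each layout_examples entry pairs an input layout image and a prompt with a pre-rendered output image. The output image is bound to the hidden dummy_image_for_outputs component, and because run_on_click=True the fn lambda runs when an example is clicked, returning the cached image wrapped in a list for the Gallery plus the prompt text, so the example result is shown without invoking the GPU-backed generate pipeline. Below is a minimal standalone sketch of the same pattern, not the Space's exact code; it assumes Gradio 4.x and that the two example images exist at the listed paths, and the component names here are illustrative.

import gradio as gr

# Each example row: [input image, prompt, pre-rendered output image].
examples = [
    ["examples/layout_input.jpg", "A modern bedroom", "examples/layout_output.jpg"],
]

with gr.Blocks() as demo:
    input_image = gr.Image(type="numpy", label="Input Image")
    prompt = gr.Textbox(label="Prompt")
    gallery = gr.Gallery(label="Output", show_label=False)
    # Hidden component whose only job is to carry each example's cached output image.
    cached_output = gr.Image(visible=False, label="Result")

    gr.Examples(
        # args = (input_image, prompt, cached_output); return the cached output
        # wrapped in a list for the Gallery, and echo the prompt back.
        fn=lambda *args: [[args[-1]], args[-2]],
        examples=examples,
        inputs=[input_image, prompt, cached_output],
        outputs=[gallery, prompt],
        run_on_click=True,
    )

demo.launch()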