Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -67,7 +67,7 @@ pipe.scheduler = EulerAncestralDiscreteScheduler(
 # pipe.enable_freeu(b1=1.1, b2=1.1, s1=0.5, s2=0.7)
 pipe.enable_xformers_memory_efficient_attention()
 pipe.force_zeros_for_empty_prompt = False
-
+pipe.to('cuda')
 low_threshold = 100
 high_threshold = 200
 
@@ -94,7 +94,6 @@ def get_canny_filter(image):
 
 @spaces.GPU
 def generate_(prompt, negative_prompt, canny_image, num_steps, controlnet_conditioning_scale, seed):
-    pipe.to('cuda')
     generator = torch.Generator("cuda").manual_seed(seed)
     images = pipe(
         prompt, negative_prompt=negative_prompt, image=canny_image, num_inference_steps=num_steps, controlnet_conditioning_scale=float(controlnet_conditioning_scale),
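What the commit does: pipe.to('cuda') is removed from inside the @spaces.GPU-decorated generate_ function and issued once at module level instead, which matches the module-level .to('cuda') layout shown in the ZeroGPU docs (the spaces runtime handles the actual GPU attachment inside @spaces.GPU calls). For context, a minimal sketch of that layout; only the names pipe, generate_ and its parameter list come from the diff, while the model IDs and surrounding setup are illustrative assumptions, not the full app.py:

import spaces
import torch
from diffusers import (
    ControlNetModel,
    StableDiffusionXLControlNetPipeline,
    EulerAncestralDiscreteScheduler,
)

# Build the pipeline once at import time (assumed SDXL + canny ControlNet checkpoints).
controlnet = ControlNetModel.from_pretrained(
    "diffusers/controlnet-canny-sdxl-1.0", torch_dtype=torch.float16
)
pipe = StableDiffusionXLControlNetPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",
    controlnet=controlnet,
    torch_dtype=torch.float16,
)
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
pipe.enable_xformers_memory_efficient_attention()  # as in app.py; requires xformers installed
pipe.force_zeros_for_empty_prompt = False

# Module-level .to('cuda'): on ZeroGPU the device transfer is deferred until a
# GPU is attached to a @spaces.GPU call, so this no longer runs per request.
pipe.to("cuda")

@spaces.GPU
def generate_(prompt, negative_prompt, canny_image, num_steps,
              controlnet_conditioning_scale, seed):
    # Seeded generation on the GPU allocated for this call.
    generator = torch.Generator("cuda").manual_seed(seed)
    return pipe(
        prompt,
        negative_prompt=negative_prompt,
        image=canny_image,
        num_inference_steps=num_steps,
        controlnet_conditioning_scale=float(controlnet_conditioning_scale),
        generator=generator,
    ).images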