Spaces: Running on Zero
ameerazam08 committed • Commit 9d12334 • 1 Parent(s): 8299563
Update app.py

app.py CHANGED
@@ -11,31 +11,36 @@ import huggingface_hub
 huggingface_hub.login(os.getenv('HF_TOKEN_FLUX'))
 
 check_min_version("0.30.2")
-
-
-# Build pipeline
-controlnet = FluxControlNetModel.from_pretrained("alimama-creative/FLUX.1-dev-Controlnet-Inpainting-Beta", torch_dtype=torch.bfloat16)
 transformer = FluxTransformer2DModel.from_pretrained(
     "black-forest-labs/FLUX.1-dev", subfolder='transformer', torch_dytpe=torch.bfloat16
 )
-
-
-
-
-
-
-pipe.
-
+
+
+# Build pipeline
+
+def load_models(model_name):
+    controlnet = FluxControlNetModel.from_pretrained(f"alimama-creative/{model_name}", torch_dtype=torch.bfloat16)
+    pipe = FluxControlNetInpaintingPipeline.from_pretrained(
+        "black-forest-labs/FLUX.1-dev",
+        controlnet=controlnet,
+        transformer=transformer,
+        torch_dtype=torch.bfloat16
+    ).to("cuda")
+    pipe.transformer.to(torch.bfloat16)
+    pipe.controlnet.to(torch.bfloat16)
+    return pipe
 
 
 MARKDOWN = """
-# FLUX.1-dev-Inpainting-Model-
+# FLUX.1-dev-Inpainting-Model-GPU 🔥
+Model by alimama-creative
 """
 
 @spaces.GPU()
 def process(input_image_editor,
             prompt,
             negative_prompt,
+            model_name,
             controlnet_conditioning_scale,
             guidance_scale,
             seed,
@@ -46,6 +51,8 @@ def process(input_image_editor,
     mask = input_image_editor['layers'][0]
     size = (768, 768)
     image_or = image.copy()
+    pipe = load_models(model_name)
+    pipe = pipe.to("cuda")
 
     image = image.convert("RGB").resize(size)
     mask = mask.convert("RGB").resize(size)
@@ -80,6 +87,7 @@ with gr.Blocks() as demo:
 
         prompt = gr.Textbox(lines=2, placeholder="Enter prompt here...")
        negative_prompt = gr.Textbox(lines=2, placeholder="Enter negative_prompt here...")
+        model_name = gr.Dropdown(choices=["FLUX.1-dev-Controlnet-Inpainting-Beta","FLUX.1-dev-Controlnet-Inpainting-Alpha"], label="Select Model Inpainting", value="FLUX.1-dev-Controlnet-Inpainting-Beta"),
         controlnet_conditioning_scale = gr.Slider(minimum=0, step=0.01, maximum=1, value=0.9, label="controlnet_conditioning_scale")
         guidance_scale = gr.Slider(minimum=1, step=0.5, maximum=10, value=3.5, label="Image to generate")
         seed = gr.Slider(minimum=0, step=1, maximum=10000000, value=124, label="Seed Value")
@@ -101,6 +109,7 @@ with gr.Blocks() as demo:
             input_image_editor_component,
             prompt,
             negative_prompt,
+            model_name,
             controlnet_conditioning_scale,
             guidance_scale,
             seed,