Spaces: Running on Zero
Commit • c46b16f
1 Parent(s): be190eb
Update app.py
app.py CHANGED
@@ -28,7 +28,6 @@ sdxl_loras = [
 saved_names = [hf_hub_download(repo_id, filename) for _, _, repo_id, _, filename, _ in sdxl_loras]

 def update_selection(selected_state: gr.SelectData):
-    sleep(60)
     lora_repo = sdxl_loras[selected_state.index][2]
     instance_prompt = sdxl_loras[selected_state.index][3]
     updated_text = f"### Selected: [{lora_repo}](https://huggingface.co/{lora_repo})"
@@ -41,7 +40,6 @@ mutable_pipe = StableDiffusionXLPipeline.from_pretrained(
     torch_dtype=torch.float16,
 ).to("cpu")
 original_pipe = copy.deepcopy(mutable_pipe)
-mutable_pipe.to("cuda")

 last_lora = ""
 last_merged = False
@@ -58,7 +56,6 @@ def run_lora(prompt, negative, weight, selected_state):
     if(last_lora != repo_name):
         if(last_merged):
             pipe = copy.deepcopy(original_pipe)
-            pipe.to("cuda")
         else:
             pipe.unload_lora_weights()
         is_compatible = sdxl_loras[selected_state.index][5]
@@ -78,6 +75,7 @@ def run_lora(prompt, negative, weight, selected_state):
             )
             lora_model.merge_to(pipe.text_encoder, pipe.unet, weights_sd, torch.float16, "cuda")
             last_merged = True
+    pipe.to("cuda")
     image = pipe(
         prompt=prompt, negative_prompt=negative, num_inference_steps=20, guidance_scale=7.5, cross_attention_kwargs=cross_attention_kwargs).images[0]
     last_lora = repo_name
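Net effect of the commit: the module-level mutable_pipe.to("cuda") and the early pipe.to("cuda") inside run_lora are removed, the sleep(60) stall in update_selection is dropped, and a single pipe.to("cuda") now runs right before inference, presumably so the GPU is only claimed when an image is actually generated. Below is a minimal sketch of that pattern, not the Space's actual app.py; the base model id, the run() signature, and the prompt defaults are illustrative assumptions.

import copy

import torch
from diffusers import StableDiffusionXLPipeline

# Load and keep the pipeline on CPU at startup; no GPU is assumed yet.
mutable_pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",  # assumed base model
    torch_dtype=torch.float16,
).to("cpu")
original_pipe = copy.deepcopy(mutable_pipe)  # pristine CPU copy for resets

def run(prompt, negative=""):
    # Start from the untouched copy, then move to CUDA once, just before inference.
    pipe = copy.deepcopy(original_pipe)
    pipe.to("cuda")
    return pipe(
        prompt=prompt,
        negative_prompt=negative,
        num_inference_steps=20,
        guidance_scale=7.5,
    ).images[0]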