AlekseyCalvin committed on
Commit
1e0017b
1 Parent(s): 8d33af5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -6
app.py CHANGED
@@ -99,7 +99,7 @@ def update_selection(evt: gr.SelectData, width, height):
99
  )
100
 
101
  @spaces.GPU(duration=70)
102
- def generate_image(prompt, trigger_word, steps, seed, cfg_scale, width, height, lora_scale, progress):
103
  pipe.to("cuda")
104
  generator = torch.Generator(device="cuda").manual_seed(seed)
105
 
@@ -107,6 +107,7 @@ def generate_image(prompt, trigger_word, steps, seed, cfg_scale, width, height,
107
  # Generate image
108
  image = pipe(
109
  prompt=f"{prompt} {trigger_word}",
 
110
  num_inference_steps=steps,
111
  guidance_scale=cfg_scale,
112
  width=width,
@@ -116,9 +117,11 @@ def generate_image(prompt, trigger_word, steps, seed, cfg_scale, width, height,
116
  ).images[0]
117
  return image
118
 
119
- def run_lora(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, progress=gr.Progress(track_tqdm=True)):
 
 
120
  if selected_index is None:
121
- raise gr.Error("You must select a LoRA before proceeding.")
122
 
123
  selected_lora = loras[selected_index]
124
  lora_path = selected_lora["repo"]
@@ -150,7 +153,7 @@ def run_lora(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, wid
150
  if randomize_seed:
151
  seed = random.randint(0, MAX_SEED)
152
 
153
- image = generate_image(prompt, trigger_word, steps, seed, cfg_scale, width, height, lora_scale, progress)
154
  pipe.to("cpu")
155
  pipe.unload_lora_weights()
156
  return image, seed
@@ -185,7 +188,10 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as app:
185
  selected_index = gr.State(None)
186
  with gr.Row():
187
  with gr.Column(scale=3):
188
- prompt = gr.Textbox(label="Prompt", lines=1, placeholder="Select LoRa/Style & type prompt!")
 
 
 
189
  with gr.Column(scale=1, elem_id="gen_column"):
190
  generate_button = gr.Button("Generate", variant="primary", elem_id="gen_btn")
191
  with gr.Row():
@@ -227,7 +233,7 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as app:
227
  gr.on(
228
  triggers=[generate_button.click, prompt.submit],
229
  fn=run_lora,
230
- inputs=[prompt, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale],
231
  outputs=[result, seed]
232
  )
233
 
 
99
  )
100
 
101
  @spaces.GPU(duration=70)
102
+ def generate_image(prompt, negative_prompt, trigger_word, steps, seed, cfg_scale, width, height, lora_scale, progress):
103
  pipe.to("cuda")
104
  generator = torch.Generator(device="cuda").manual_seed(seed)
105
 
 
107
  # Generate image
108
  image = pipe(
109
  prompt=f"{prompt} {trigger_word}",
110
+ negative_prompt=negative_prompt,
111
  num_inference_steps=steps,
112
  guidance_scale=cfg_scale,
113
  width=width,
 
117
  ).images[0]
118
  return image
119
 
120
+ def run_lora(prompt, negative_prompt, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, progress=gr.Progress(track_tqdm=True)):
121
+ if negative == "":
122
+ negative = None
123
  if selected_index is None:
124
+ raise gr.Error("Select a LoRA adapter square before proceeding.")
125
 
126
  selected_lora = loras[selected_index]
127
  lora_path = selected_lora["repo"]
 
153
  if randomize_seed:
154
  seed = random.randint(0, MAX_SEED)
155
 
156
+ image = generate_image(prompt, negative_prompt, trigger_word, steps, seed, cfg_scale, width, height, lora_scale, progress)
157
  pipe.to("cpu")
158
  pipe.unload_lora_weights()
159
  return image, seed
 
188
  selected_index = gr.State(None)
189
  with gr.Row():
190
  with gr.Column(scale=3):
191
+ prompt = gr.Textbox(label="Prompt", lines=1, placeholder="Select LoRa/Style & type prompt! What do you want to see?")
192
+ with gr.Row():
193
+ with gr.Column(scale=3):
194
+ negative_prompt = gr.Textbox(label="Negative Prompt", lines=1, placeholder="List unwanted conditions, open-fluxedly!")
195
  with gr.Column(scale=1, elem_id="gen_column"):
196
  generate_button = gr.Button("Generate", variant="primary", elem_id="gen_btn")
197
  with gr.Row():
 
233
  gr.on(
234
  triggers=[generate_button.click, prompt.submit],
235
  fn=run_lora,
236
+ inputs=[prompt, negative_prompt, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale],
237
  outputs=[result, seed]
238
  )
239