multimodalart HF staff committed on
Commit
11afe3b
1 Parent(s): ea1e5ba

Fix defaults

Browse files
Files changed (1) hide show
  1. app.py +19 -4
app.py CHANGED
@@ -199,9 +199,11 @@ def start_training(
199
  enable_xformers_memory_efficient_attention,
200
  adam_beta1,
201
  adam_beta2,
 
202
  prodigy_beta3,
203
  prodigy_decouple,
204
  adam_weight_decay,
 
205
  adam_weight_decay_text_encoder,
206
  adam_epsilon,
207
  prodigy_use_bias_correction,
@@ -250,9 +252,7 @@ def start_training(
250
  f"prior_loss_weight={prior_loss_weight}",
251
  f"num_new_tokens_per_abstraction={int(num_new_tokens_per_abstraction)}",
252
  f"num_train_epochs={int(num_train_epochs)}",
253
- f"prodigy_beta3={prodigy_beta3}",
254
  f"adam_weight_decay={adam_weight_decay}",
255
- f"adam_weight_decay_text_encoder={adam_weight_decay_text_encoder}",
256
  f"adam_epsilon={adam_epsilon}",
257
  f"prodigy_decouple={prodigy_decouple}",
258
  f"prodigy_use_bias_correction={prodigy_use_bias_correction}",
@@ -275,7 +275,7 @@ def start_training(
275
  commands.append("train_text_encoder_ti")
276
  elif train_text_encoder:
277
  commands.append("train_text_encoder")
278
- commands.append(f"--train_text_encoder_frac={train_text_encoder_frac}")
279
  if enable_xformers_memory_efficient_attention:
280
  commands.append("enable_xformers_memory_efficient_attention")
281
  if use_snr_gamma:
@@ -294,6 +294,10 @@ def start_training(
294
  shutil.copy(image, class_folder)
295
  commands.append(f"class_data_dir={class_folder}")
296
  shutil.copytree(class_folder, f"{spacerunner_folder}/{class_folder}")
 
 
 
 
297
  print(commands)
298
  # Joining the commands with ';' separator for spacerunner format
299
  spacerunner_args = ';'.join(commands)
@@ -752,6 +756,9 @@ with gr.Blocks(css=css, theme=theme) as demo:
752
  step=0.01,
753
  value=0.999
754
  )
 
 
 
755
  prodigy_beta3 = gr.Number(
756
  label="Prodigy Beta 3",
757
  value=None,
@@ -759,7 +766,10 @@ with gr.Blocks(css=css, theme=theme) as demo:
759
  minimum=0,
760
  maximum=1,
761
  )
762
- prodigy_decouple = gr.Checkbox(label="Prodigy Decouple")
 
 
 
763
  adam_weight_decay = gr.Number(
764
  label="Adam Weight Decay",
765
  value=1e-04,
@@ -767,6 +777,9 @@ with gr.Blocks(css=css, theme=theme) as demo:
767
  minimum=0,
768
  maximum=1,
769
  )
 
 
 
770
  adam_weight_decay_text_encoder = gr.Number(
771
  label="Adam Weight Decay Text Encoder",
772
  value=None,
@@ -975,9 +988,11 @@ with gr.Blocks(css=css, theme=theme) as demo:
975
  enable_xformers_memory_efficient_attention,
976
  adam_beta1,
977
  adam_beta2,
 
978
  prodigy_beta3,
979
  prodigy_decouple,
980
  adam_weight_decay,
 
981
  adam_weight_decay_text_encoder,
982
  adam_epsilon,
983
  prodigy_use_bias_correction,
 
199
  enable_xformers_memory_efficient_attention,
200
  adam_beta1,
201
  adam_beta2,
202
+ use_prodigy_beta3,
203
  prodigy_beta3,
204
  prodigy_decouple,
205
  adam_weight_decay,
206
+ use_adam_weight_decay_text_encoder,
207
  adam_weight_decay_text_encoder,
208
  adam_epsilon,
209
  prodigy_use_bias_correction,
 
252
  f"prior_loss_weight={prior_loss_weight}",
253
  f"num_new_tokens_per_abstraction={int(num_new_tokens_per_abstraction)}",
254
  f"num_train_epochs={int(num_train_epochs)}",
 
255
  f"adam_weight_decay={adam_weight_decay}",
 
256
  f"adam_epsilon={adam_epsilon}",
257
  f"prodigy_decouple={prodigy_decouple}",
258
  f"prodigy_use_bias_correction={prodigy_use_bias_correction}",
 
275
  commands.append("train_text_encoder_ti")
276
  elif train_text_encoder:
277
  commands.append("train_text_encoder")
278
+ commands.append(f"train_text_encoder_frac={train_text_encoder_frac}")
279
  if enable_xformers_memory_efficient_attention:
280
  commands.append("enable_xformers_memory_efficient_attention")
281
  if use_snr_gamma:
 
294
  shutil.copy(image, class_folder)
295
  commands.append(f"class_data_dir={class_folder}")
296
  shutil.copytree(class_folder, f"{spacerunner_folder}/{class_folder}")
297
+ if use_prodigy_beta3:
298
+ commands.append(f"prodigy_beta3={prodigy_beta3}")
299
+ if use_adam_weight_decay_text_encoder:
300
+ commands.append(f"adam_weight_decay_text_encoder={adam_weight_decay_text_encoder}")
301
  print(commands)
302
  # Joining the commands with ';' separator for spacerunner format
303
  spacerunner_args = ';'.join(commands)
 
756
  step=0.01,
757
  value=0.999
758
  )
759
+ use_prodigy_beta3 = gr.Checkbox(
760
+ label="Use Prodigy Beta 3?"
761
+ )
762
  prodigy_beta3 = gr.Number(
763
  label="Prodigy Beta 3",
764
  value=None,
 
766
  minimum=0,
767
  maximum=1,
768
  )
769
+ prodigy_decouple = gr.Checkbox(
770
+ label="Prodigy Decouple",
771
+ value=True
772
+ )
773
  adam_weight_decay = gr.Number(
774
  label="Adam Weight Decay",
775
  value=1e-04,
 
777
  minimum=0,
778
  maximum=1,
779
  )
780
+ use_adam_weight_decay_text_encoder = gr.Checkbox(
781
+ label="Use Adam Weight Decay Text Encoder"
782
+ )
783
  adam_weight_decay_text_encoder = gr.Number(
784
  label="Adam Weight Decay Text Encoder",
785
  value=None,
 
988
  enable_xformers_memory_efficient_attention,
989
  adam_beta1,
990
  adam_beta2,
991
+ use_prodigy_beta3,
992
  prodigy_beta3,
993
  prodigy_decouple,
994
  adam_weight_decay,
995
+ use_adam_weight_decay_text_encoder,
996
  adam_weight_decay_text_encoder,
997
  adam_epsilon,
998
  prodigy_use_bias_correction,