[Path]
pretrained_model_name_or_path = "/notebooks/box/Trim.safetensors"
train_data_dir = "/notebooks/LoRA_Image/"

__root_dir = "/notebooks/LoRA/sd-scripts/"
logging_dir = "calc:f'{__root_dir}logs'"
output_dir = "calc:f'{__root_dir}outputs'"
sample_prompts = "/notebooks/LoRA_Setting/A.txt"
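# Note: the "calc:" and "ref:" prefixes and the "__"-prefixed helper keys appear to be
# resolved by a preprocessing step before the values are handed to sd-scripts (an
# assumption based on this file alone; they are not native sd-scripts options).
# With __root_dir as set above, logging_dir resolves to "/notebooks/LoRA/sd-scripts/logs"
# and output_dir to "/notebooks/LoRA/sd-scripts/outputs".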
|
|
|
[Prompt]
sample_every_n_epochs = 1
sample_sampler = "k_euler_a"
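# The actual contents of A.txt are not shown here. For reference, sd-scripts sample-prompt
# files take one prompt per line with optional flags such as --n (negative prompt),
# --w/--h (size), --d (seed), --l (CFG scale) and --s (steps); a purely hypothetical line:
#   1girl, akashi, solo --n lowres, bad anatomy --w 512 --h 768 --d 987 --l 7 --s 28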
|
|
|
[Else]
shuffle_caption = true
caption_extension = ".txt"
keep_tokens = 1
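# With shuffle_caption enabled, keep_tokens = 1 pins the first comma-separated token of
# each caption (typically the trigger word) in place while the remaining tags are shuffled.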
|
|
|
color_aug = true
|
|
|
__upper_px = 512
__resolution_x = "calc:512 + __upper_px"
__resolution_y = "calc:512 + __upper_px"
|
|
|
resolution = "calc:f'{__resolution_x},{__resolution_y}'"
enable_bucket = true
min_bucket_reso = "calc:320 + __upper_px"
bucket_no_upscale = true
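# Resolved values: __resolution_x = __resolution_y = 512 + 512 = 1024, so resolution
# becomes "1024,1024" and min_bucket_reso becomes 320 + 512 = 832.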
|
caption_dropout_every_n_epochs = 9
caption_tag_dropout_rate = 0.2
save_every_n_epochs = 2
train_batch_size = 1
xformers = true
max_train_epochs = 14
persistent_data_loader_workers = true
seed = 987
mixed_precision = "fp16"
save_precision = "ref:mixed_precision"
clip_skip = 2
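# save_precision resolves to "fp16", copied from mixed_precision via ref:.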
|
|
|
optimizer_type = "Lion"
__div_rate = 8
learning_rate = "calc:1.0e-4 / __div_rate"
text_encoder_lr = "calc:5.0e-5 / __div_rate"
unet_lr = "ref:learning_rate"
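# Resolved values: learning_rate = 1.0e-4 / 8 = 1.25e-5, text_encoder_lr = 5.0e-5 / 8 =
# 6.25e-6, and unet_lr copies learning_rate (1.25e-5). The __div_rate divisor presumably
# compensates for Lion, which is generally run at a much lower learning rate than AdamW.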
|
|
|
lr_scheduler = "cosine_with_restarts"
lr_warmup_steps = 500
lr_scheduler_num_cycles = 4
|
|
|
network_module = "networks.lora"
network_dim = 128
network_alpha = "calc:float(network_dim) / 2.0"
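# network_alpha resolves to 128 / 2.0 = 64.0, i.e. half of network_dim.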
|
|
|
__upper_mixed_precision = "calc:mixed_precision.upper()"
output_name = "calc:f'Akashi_{optimizer_type}Div{__div_rate}1024Px'"
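# output_name resolves to "Akashi_LionDiv81024Px". __upper_mixed_precision resolves to
# "FP16" but does not appear to be referenced anywhere else in this file.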
|