kohya_ss/test/config/LoKR-AdamW8bit-toml.json
{
"LoRA_type": "LyCORIS/LoKr",
"LyCORIS_preset": "full",
"adaptive_noise_scale": 0,
"additional_parameters": "--lr_scheduler_type \"CosineAnnealingLR\" --lr_scheduler_args \"T_max=1000\" \"eta_min=0e-0\"",
"block_alphas": "",
"block_dims": "",
"block_lr_zero_threshold": "",
"bucket_no_upscale": true,
"bucket_reso_steps": 1,
"bypass_mode": false,
"cache_latents": true,
"cache_latents_to_disk": true,
"caption_dropout_every_n_epochs": 0,
"caption_dropout_rate": 0.1,
"caption_extension": ".txt",
"clip_skip": "1",
"color_aug": false,
"constrain": 0,
"conv_alpha": 1,
"conv_block_alphas": "",
"conv_block_dims": "",
"conv_dim": 100000,
"dataset_config": "./test/config/dataset.toml",
"debiased_estimation_loss": false,
"decompose_both": false,
"dim_from_weights": false,
"dora_wd": false,
"down_lr_weight": "",
"enable_bucket": true,
"epoch": 150,
"extra_accelerate_launch_args": "",
"factor": 6,
"flip_aug": false,
"fp8_base": false,
"full_bf16": false,
"full_fp16": false,
"gpu_ids": "",
"gradient_accumulation_steps": 1,
"gradient_checkpointing": false,
"huber_c": 0.1,
"huber_schedule": "snr",
"ip_noise_gamma": 0,
"ip_noise_gamma_random_strength": false,
"keep_tokens": 1,
"learning_rate": 1,
"log_tracker_config": "",
"log_tracker_name": "",
"logging_dir": "./test/logs",
"lora_network_weights": "",
"loss_type": "l2",
"lr_scheduler": "cosine",
"lr_scheduler_args": "",
"lr_scheduler_num_cycles": "",
"lr_scheduler_power": "",
"lr_warmup": 0,
"main_process_port": 0,
"masked_loss": false,
"max_bucket_reso": 2048,
"max_data_loader_n_workers": "0",
"max_grad_norm": 1,
"max_resolution": "512,512",
"max_timestep": 1000,
"max_token_length": "75",
"max_train_epochs": "",
"max_train_steps": "",
"mem_eff_attn": false,
"mid_lr_weight": "",
"min_bucket_reso": 256,
"min_snr_gamma": 5,
"min_timestep": 0,
"mixed_precision": "bf16",
"model_list": "custom",
"module_dropout": 0,
"multi_gpu": false,
"multires_noise_discount": 0.1,
"multires_noise_iterations": 6,
"network_alpha": 1,
"network_dim": 100000,
"network_dropout": 0,
"noise_offset": 0,
"noise_offset_random_strength": false,
"noise_offset_type": "Multires",
"num_cpu_threads_per_process": 2,
"num_machines": 1,
"num_processes": 1,
"optimizer": "Prodigy",
"optimizer_args": "\"d0=1e-5\" \"d_coef=1.0\" \"weight_decay=0.4\" \"decouple=True\" \"safeguard_warmup=True\" \"use_bias_correction=True\"",
"output_dir": "./test/output",
"output_name": "LoKR-AdamW8bit-toml",
"persistent_data_loader_workers": false,
"pretrained_model_name_or_path": "runwayml/stable-diffusion-v1-5",
"prior_loss_weight": 1,
"random_crop": false,
"rank_dropout": 0,
"rank_dropout_scale": false,
"reg_data_dir": "",
"rescaled": false,
"resume": "",
"sample_every_n_epochs": 0,
"sample_every_n_steps": 25,
"sample_prompts": "a painting of a gas mask , by darius kawasaki",
"sample_sampler": "euler_a",
"save_as_bool": false,
"save_every_n_epochs": 15,
"save_every_n_steps": 0,
"save_last_n_steps": 0,
"save_last_n_steps_state": 0,
"save_model_as": "safetensors",
"save_precision": "bf16",
"save_state": false,
"save_state_on_train_end": false,
"scale_v_pred_loss_like_noise_pred": false,
"scale_weight_norms": 0,
"sdxl": false,
"sdxl_cache_text_encoder_outputs": false,
"sdxl_no_half_vae": true,
"seed": "",
"shuffle_caption": true,
"stop_text_encoder_training": 0,
"text_encoder_lr": 1,
"train_batch_size": 2,
"train_data_dir": "",
"train_norm": false,
"train_on_input": false,
"training_comment": "KoopaTroopa",
"unet_lr": 1,
"unit": 1,
"up_lr_weight": "",
"use_cp": false,
"use_scalar": false,
"use_tucker": false,
"use_wandb": false,
"v2": false,
"v_parameterization": false,
"v_pred_like_loss": 0,
"vae": "",
"vae_batch_size": 0,
"wandb_api_key": "",
"wandb_run_name": "",
"weighted_captions": false,
"xformers": "xformers"
}
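
Below is a minimal sketch, using only the Python standard library, of how this preset could be inspected outside the kohya_ss GUI. The config path is hypothetical (adjust it to wherever the repo is checked out), and the script only reads keys that appear in the file above; it makes no assumptions about how kohya_ss itself loads the config.

    import json
    from pathlib import Path

    # Hypothetical location of the preset shown above; adjust as needed.
    CONFIG_PATH = Path("test/config/LoKR-AdamW8bit-toml.json")

    def summarize(config_path: Path) -> None:
        """Print the fields most relevant to this LyCORIS/LoKr run."""
        cfg = json.loads(config_path.read_text(encoding="utf-8"))

        # network_dim/conv_dim are set very large here; for LoKr the
        # decomposition is governed primarily by `factor`.
        print("network type  :", cfg["LoRA_type"], "/ preset:", cfg["LyCORIS_preset"])
        print("factor        :", cfg["factor"])
        print("optimizer     :", cfg["optimizer"], cfg["optimizer_args"])
        print("learning rates:", cfg["learning_rate"], cfg["unet_lr"], cfg["text_encoder_lr"])
        print("scheduler     :", cfg["lr_scheduler"], cfg["additional_parameters"])
        print("dataset config:", cfg["dataset_config"])

    if __name__ == "__main__":
        summarize(CONFIG_PATH)

Running this against the file above would surface, for example, that the optimizer field selects Prodigy (with all learning rates set to 1, as Prodigy expects) even though the output name mentions AdamW8bit, and that the dataset is supplied via the external ./test/config/dataset.toml referenced by dataset_config.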