Text-to-Image
Diffusers
Safetensors
English
StableDiffusionXLPipeline
Inference Endpoints
Bedovyy commited on
Commit
aa9c3cd
1 Parent(s): d982fd6

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +88 -10
README.md CHANGED
@@ -1,10 +1,88 @@
1
- ---
2
- license: other
3
- license_name: fair-ai-public-license-1.0-sd
4
- license_link: https://freedevproject.org/faipl-1.0-sd/
5
- language:
6
- - en
7
- library_name: diffusers
8
- pipeline_tag: text-to-image
9
- base_model: Laxhar/noobai-XL-1.0
10
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: other
3
+ license_name: fair-ai-public-license-1.0-sd
4
+ license_link: https://freedevproject.org/faipl-1.0-sd/
5
+ language:
6
+ - en
7
+ library_name: diffusers
8
+ pipeline_tag: text-to-image
9
+ base_model: Laxhar/noobai-XL-1.0
10
+ datasets:
11
+ - pls2000/pixiv20161029_20241026_monthly_rank_1_50
12
+ - pls2000/aiart_channel_nai3_geachu
13
+ - cagliostrolab/860k-ordered-tags
14
+ ---
15
+
16
+ ## Training
17
+
18
+ Trained in 2 steps: `Lion8bit` for quick training, then `Lion` for detail.
19
+
20
+ - Tool: kohya-ss/sd-scripts
21
+ - GPUs: 2x RTX3090
22
+
23
+
24
+ ### arcaillous-nbxl-v10b.safetensors
25
+ ```
26
+ NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
27
+ --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
28
+ --dataset_config="arca_nbxl.toml" \
29
+ --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
30
+ --save_model_as="safetensors" \
31
+ --train_batch_size 4 --gradient_accumulation_steps 64 \
32
+ --learning_rate=1e-5 --optimizer_type="Lion8bit" \
33
+ --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
34
+ --sdpa \
35
+ --no_half_vae \
36
+ --cache_latents --cache_latents_to_disk \
37
+ --gradient_checkpointing \
38
+ --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
39
+ --ddp_timeout=10000000 \
40
+ --max_train_epochs 4 --save_every_n_epochs 1 \
41
+ --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
42
+ ```
43
+
44
+ ### arcaillous-nbxl-v10.safetensors
45
+ ```
46
+ NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
47
+ --pretrained_model_name_or_path="/ai/train/ckpt/arcaillous-nbxl-v10b.safetensors" \
48
+ --dataset_config="arca_nbxl.toml" \
49
+ --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10" \
50
+ --save_model_as="safetensors" \
51
+ --train_batch_size 1 --gradient_accumulation_steps 256 \
52
+ --learning_rate=1e-5 --optimizer_type="Lion" \
53
+ --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" \
54
+ --min_snr_gamma 5 --ip_noise_gamma 0.05 --debiased_estimation_loss \
55
+ --xformers \
56
+ --no_half_vae \
57
+ --cache_latents --cache_latents_to_disk \
58
+ --gradient_checkpointing \
59
+ --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
60
+ --ddp_timeout=10000000 \
61
+ --max_train_epochs 8 --save_every_n_epochs 1 --save_every_n_steps 200 \
62
+ --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
63
+ ```
64
+
65
+ ### arca_nbxl.toml
66
+ ```
67
+ [general]
68
+ shuffle_caption = true
69
+ caption_tag_dropout_rate = 0.2
70
+ keep_tokens_separator = "|||"
71
+ caption_extension = ".txt"
72
+
73
+ [[datasets]]
74
+ enable_bucket = true
75
+ min_bucket_reso = 512
76
+ max_bucket_reso = 4096
77
+ resolution = 1024
78
+
79
+ [[datasets.subsets]]
80
+ image_dir = "/ai/data/sd/datasets/danbooru-gs"
81
+ num_repeats = 1
82
+ [[datasets.subsets]]
83
+ image_dir = "/storage/pls2000_pixiv20161029_20241026_monthly_rank_1_50/to_train"
84
+ num_repeats = 1
85
+ [[datasets.subsets]]
86
+ image_dir = "/storage/aichan/to_train"
87
+ num_repeats = 1
88
+ ```