{
"last_node_id": 261,
"last_link_id": 578,
"nodes": [
{
"id": 181,
"type": "Note",
"pos": [
-819.9128446704433,
383.3899331971004
],
"size": {
"0": 336.93280029296875,
"1": 114.20084381103516
},
"flags": {},
"order": 0,
"mode": 0,
"properties": {
"text": ""
},
"widgets_values": [
"Recommended CFG: 7 | Or choose from range: 4-8\nMore CFG = more prompt and quality tags following but less creativity and flexibility.\nToo much CFG will burn the image and will try to follow the prompt a bit too much.\nToo little CFG will default to Anime / VN style."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 180,
"type": "Note",
"pos": [
-1148.9128446704428,
359.3899331971004
],
"size": {
"0": 317.2171325683594,
"1": 140.8779754638672
},
"flags": {},
"order": 1,
"mode": 0,
"properties": {
"text": ""
},
"widgets_values": [
"Recommended: 1024x1536 with 42 Compression\n\nUse a multiply of 128 for the resolutions.\nAny resolution works as long as the dimensions are a multiply of 128.\nCompression ratio can be between 32 and 64.\nIt affects the Stage C's internal resolution.\nLower compression = higher internal resolution."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 36,
"type": "StableCascade_StageB_Conditioning",
"pos": [
-119.29915486737418,
451.69258382597536
],
"size": {
"0": 311.6343688964844,
"1": 46
},
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 472,
"slot_index": 0
},
{
"name": "stage_c",
"type": "LATENT",
"link": 523
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
471
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "StableCascade_StageB_Conditioning"
}
},
{
"id": 179,
"type": "Note",
"pos": [
-462.29915486737417,
306.69258382597536
],
"size": {
"0": 327.17431640625,
"1": 185.5946807861328
},
"flags": {},
"order": 2,
"mode": 0,
"properties": {
"text": ""
},
"widgets_values": [
"Scheduler and add noise options affects the iris type on the waifus.\n\nTry experimenting to see what fits your tastes.\nYou can start with DDIM Uniform and Exponential.\n\nDisabling add noise option is usually better for far shot eyes but can be worse for others.\n\nAffects of the shift value is small but Stage B is trained using shift 4."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 213,
"type": "ModelSamplingStableCascade",
"pos": [
-452.29915486737417,
199.69258382597508
],
"size": {
"0": 301.27374267578125,
"1": 58
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 492,
"slot_index": 0
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
514
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingStableCascade"
},
"widgets_values": [
4
]
},
{
"id": 210,
"type": "VAEDecodeTiled",
"pos": [
481,
81
],
"size": {
"0": 210,
"1": 78
},
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 477
},
{
"name": "vae",
"type": "VAE",
"link": 478
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
510
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecodeTiled"
},
"widgets_values": [
2048
]
},
{
"id": 140,
"type": "VAELoader",
"pos": [
217,
79
],
"size": {
"0": 247.75624084472656,
"1": 76.29617309570312
},
"flags": {},
"order": 3,
"mode": 0,
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [
478
],
"shape": 3,
"slot_index": 0
}
],
"title": "Load Stage A\n",
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": [
"stage_a_ft_hq.safetensors"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 183,
"type": "Note",
"pos": [
-467,
554
],
"size": {
"0": 654.0673828125,
"1": 251.213134765625
},
"flags": {},
"order": 4,
"mode": 0,
"properties": {
"text": ""
},
"widgets_values": [
"This model does understand natural language but it will understand WD Tags better.\n\nCharacter tag require additional feature tags such as hair type, color, length etc.\nUse \"hatsune miku, aqua hair, twintails\" instead of just \"hatsune miku\".\n\nCharacter tags are formatted like this: character, serises.\nSo use \"ganyu, genshin impact\" instead of \"ganyu \\(genshin impact\\)\".\n\nThis model won't hallucinate a scene for you. Specify what you want in the image, otherwise the model won't generate anything.\n\nextremely aesthetic and best quality tags can limit the model too much on some cases. You can try very aesthetic / slightly aesthetic and high quality or it's lesser variants if that's the case for your prompt."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 34,
"type": "StableCascade_EmptyLatentImage",
"pos": [
-1144,
173
],
"size": {
"0": 310.0334167480469,
"1": 150
},
"flags": {},
"order": 5,
"mode": 0,
"outputs": [
{
"name": "stage_c",
"type": "LATENT",
"links": [
371
],
"shape": 3,
"slot_index": 0
},
{
"name": "stage_b",
"type": "LATENT",
"links": [
522
],
"shape": 3,
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "StableCascade_EmptyLatentImage"
},
"widgets_values": [
1024,
1536,
42,
1
]
},
{
"id": 3,
"type": "KSampler",
"pos": [
-810,
77
],
"size": {
"0": 325.8693542480469,
"1": 262
},
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 501,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 475,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 413
},
{
"name": "latent_image",
"type": "LATENT",
"link": 371
},
{
"name": "seed",
"type": "INT",
"link": 526,
"widget": {
"name": "seed"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
523
],
"slot_index": 0
}
],
"title": "Stage C Sampler\n",
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1008829894834889,
"increment",
30,
6,
"dpmpp_2m_sde_gpu",
"sgm_uniform",
1
]
},
{
"id": 159,
"type": "KSamplerAdvanced",
"pos": [
-124,
75
],
"size": {
"0": 320.61895751953125,
"1": 334
},
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 514
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 471
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 414
},
{
"name": "latent_image",
"type": "LATENT",
"link": 522
},
{
"name": "noise_seed",
"type": "INT",
"link": 527,
"widget": {
"name": "noise_seed"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
477
],
"shape": 3,
"slot_index": 0
}
],
"title": "Stage B Sampler\n",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"disable",
1008829894834889,
"increment",
10,
1,
"lcm",
"exponential",
0,
10000,
"disable"
]
},
{
"id": 139,
"type": "UNETLoader",
"pos": [
-459,
82
],
"size": {
"0": 307.0858154296875,
"1": 58
},
"flags": {},
"order": 6,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
492
],
"shape": 3,
"slot_index": 0
}
],
"title": "Load Stage B\n",
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": [
"sotediffusion-v2-stage_b.safetensors"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 206,
"type": "CLIPTextEncode",
"pos": [
222,
592
],
"size": {
"0": 459.0002136230469,
"1": 76
},
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 455
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
559
],
"slot_index": 0
}
],
"title": "Quality Prompts",
"properties": {
"Node name for S&R": "Positive Prompts"
},
"widgets_values": [
"very aesthetic, best quality, newest,"
],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 9,
"type": "SaveImage",
"pos": [
712,
35
],
"size": {
"0": 1206.4493408203125,
"1": 889.9664306640625
},
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 510
}
],
"properties": {},
"widgets_values": [
"SoteDiffusion/SoteDiffusion-Wuerstchen3"
]
},
{
"id": 205,
"type": "ConditioningConcat",
"pos": [
264,
858
],
"size": {
"0": 380.4000244140625,
"1": 46
},
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "conditioning_to",
"type": "CONDITIONING",
"link": 559
},
{
"name": "conditioning_from",
"type": "CONDITIONING",
"link": 578,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
475
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningConcat"
}
},
{
"id": 177,
"type": "CLIPTextEncode",
"pos": [
215,
713
],
"size": {
"0": 469.50762939453125,
"1": 96.76290130615234
},
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 451,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
413,
414
],
"slot_index": 0
}
],
"title": "Negative Prompts",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"very displeasing, displeasing, worst quality, bad quality, low quality, realistic, monochrome, comic, sketch, oldest, early, artist name, signature, blurry, simple background, upside down,"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 141,
"type": "UNETLoader",
"pos": [
-1144,
76
],
"size": {
"0": 315,
"1": 58
},
"flags": {},
"order": 7,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
501
],
"shape": 3,
"slot_index": 0
}
],
"title": "Load Stage C\n",
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": [
"sotediffusion-v2-stage_c.safetensors"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 138,
"type": "CLIPLoader",
"pos": [
218,
266
],
"size": {
"0": 471.9256591796875,
"1": 83.96123504638672
},
"flags": {},
"order": 8,
"mode": 0,
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [
451,
452,
455
],
"shape": 3,
"slot_index": 0
}
],
"title": "Load Text Encoder\n",
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": [
"sotediffusion-v2-stage_c_text_encoder.safetensors",
"stable_cascade"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 227,
"type": "PrimitiveNode",
"pos": [
-1158,
-107
],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 9,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [
526,
527
],
"slot_index": 0,
"widget": {
"name": "seed"
}
}
],
"title": "Seed",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
1008829894834889,
"randomize"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 174,
"type": "CLIPTextEncode",
"pos": [
215,
394
],
"size": {
"0": 472.7580261230469,
"1": 157.9100799560547
},
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 452
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
472,
578
],
"slot_index": 0
}
],
"title": "Positive Prompts",
"properties": {
"Node name for S&R": "Positive Prompts"
},
"widgets_values": [
"1girl, solo, cat ears, low twin braids, straight hair, purple eyes, purple hair, dress, wind, petals, feathers, magic, electricity, bioluminescence, fireflies, cowboy shot, forest, night, casting spell, looking at viewer, swirling light, medium breasts, hand on own chest, reaching, (armpit:0.25), dynamic angle, smile, bare shoulders, rim lighting,"
],
"color": "#232",
"bgcolor": "#353"
}
],
"links": [
[
371,
34,
0,
3,
3,
"LATENT"
],
[
413,
177,
0,
3,
2,
"CONDITIONING"
],
[
414,
177,
0,
159,
2,
"CONDITIONING"
],
[
451,
138,
0,
177,
0,
"CLIP"
],
[
452,
138,
0,
174,
0,
"CLIP"
],
[
455,
138,
0,
206,
0,
"CLIP"
],
[
471,
36,
0,
159,
1,
"CONDITIONING"
],
[
472,
174,
0,
36,
0,
"CONDITIONING"
],
[
475,
205,
0,
3,
1,
"CONDITIONING"
],
[
477,
159,
0,
210,
0,
"LATENT"
],
[
478,
140,
0,
210,
1,
"VAE"
],
[
492,
139,
0,
213,
0,
"MODEL"
],
[
501,
141,
0,
3,
0,
"MODEL"
],
[
510,
210,
0,
9,
0,
"IMAGE"
],
[
514,
213,
0,
159,
0,
"MODEL"
],
[
522,
34,
1,
159,
3,
"LATENT"
],
[
523,
3,
0,
36,
1,
"LATENT"
],
[
526,
227,
0,
3,
4,
"INT"
],
[
527,
227,
0,
159,
4,
"INT"
],
[
559,
206,
0,
205,
0,
"CONDITIONING"
],
[
578,
174,
0,
205,
1,
"CONDITIONING"
]
],
"groups": [
{
"title": "Stage C",
"bounding": [
-1159,
2,
687,
505
],
"color": "#a1309b",
"font_size": 24
},
{
"title": "Stage B",
"bounding": [
-471,
2,
678,
505
],
"color": "#88A",
"font_size": 24
},
{
"title": "Stage A",
"bounding": [
208,
2,
494,
191
],
"color": "#b06634",
"font_size": 24
},
{
"title": "Text Encoder",
"bounding": [
207,
190,
492,
741
],
"color": "#b58b2a",
"font_size": 24
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.4736244074476696,
"offset": [
1467.145677705609,
352.9037089032415
]
}
},
"version": 0.4
}