{
  "cells": [
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "2lJmbqrs3Mu8"
      },
      "outputs": [],
      "source": [
        "##~   DOWNLOADING CODE | BY: ANXETY   ~##\n",
        "\n",
        "from directory_setup import *\n",
        "from models_data import model_list, vae_list, controlnet_list\n",
        "\n",
        "import os\n",
        "import re\n",
        "import time\n",
        "import json\n",
        "import shutil\n",
        "import zipfile\n",
        "import requests\n",
        "import subprocess\n",
        "from datetime import timedelta\n",
        "from subprocess import getoutput\n",
        "from IPython.utils import capture\n",
        "from IPython.display import clear_output\n",
        "from urllib.parse import urlparse, parse_qs\n",
        "\n",
        "\n",
        "# Setup Env\n",
        "env = os.getenv('ENV_NAME')\n",
        "root_path = os.getenv('ROOT_PATH')\n",
        "webui_path = os.getenv('WEBUI_PATH')\n",
        "free_plan = os.getenv('FREE_PLAN')\n",
        "\n",
        "UI = os.getenv('SDW_UI')\n",
        "OLD_UI = os.getenv('SDW_OLD_UI')\n",
        "\n",
        "\n",
        "# ============ loading settings V4 =============\n",
        "def load_settings(path):\n",
        "    if os.path.exists(path):\n",
        "        with open(path, 'r') as file:\n",
        "            return json.load(file)\n",
        "    return {}\n",
        "\n",
        "settings = load_settings(f'{root_path}/settings.json')\n",
        "\n",
        "VARIABLES = [\n",
        "    'model', 'model_num', 'inpainting_model',\n",
        "    'vae', 'vae_num', 'latest_webui', 'latest_exstensions',\n",
        "    'change_webui', 'detailed_download', 'controlnet',\n",
        "    'controlnet_num', 'commit_hash', 'huggingface_token',\n",
        "    'ngrok_token', 'zrok_token', 'commandline_arguments',\n",
        "    'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',\n",
        "    'Extensions_url', 'custom_file_urls'\n",
        "]\n",
        "\n",
        "locals().update({key: settings.get(key) for key in VARIABLES})\n",
        "\n",
        "\n",
        "# ================ LIBRARIES V2 ================\n",
        "flag_file = f\"{root_path}/libraries_installed.txt\"\n",
        "\n",
        "if not os.path.exists(flag_file):\n",
        "    print(\"💿 Installing the libraries, it's going to take a while:\\n\")\n",
        "\n",
        "    install_lib = {\n",
        "        # \"aria2\": \"apt -y install aria2\",\n",
        "        \"aria2\": \"pip install aria2\",\n",
        "        \"localtunnel\": \"npm install -g localtunnel\",\n",
        "    }\n",
        "    if controlnet != 'none':\n",
        "        install_lib[\"insightface\"] = \"pip install insightface\"\n",
        "\n",
        "    additional_libs = {\n",
        "        \"Google Colab\": {\n",
        "            \"xformers\": \"pip install xformers==0.0.27 --no-deps\"\n",
        "        },\n",
        "        \"Kaggle\": {\n",
        "            \"xformers\": \"pip install xformers==0.0.26.post1\",\n",
        "            # \"torch\": \"pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\",\n",
        "            # \"aiohttp\": \"pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*\" # fix install req\n",
        "        }\n",
        "    }\n",
        "    if env in additional_libs:\n",
        "        install_lib.update(additional_libs[env])\n",
        "\n",
        "    # Loop through libraries\n",
        "    for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n",
        "        print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n",
        "        subprocess.run(install_cmd, shell=True, capture_output=True)\n",
        "\n",
        "    # Additional specific packages\n",
        "    with capture.capture_output() as cap:\n",
        "        !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
        "        !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
        "        !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.32/zrok_0.4.32_linux_amd64.tar.gz && tar -xzf zrok_0.4.32_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.32_linux_amd64.tar.gz\n",
        "    del cap\n",
        "\n",
        "    clear_output()\n",
        "\n",
        "    # Save file install lib\n",
        "    with open(flag_file, \"w\") as f:\n",
        "        f.write(\">W<'\")\n",
        "\n",
        "    print(\"🍪 Libraries are installed!\" + \" \"*35)\n",
        "    time.sleep(2)\n",
        "    clear_output()\n",
        "\n",
        "\n",
        "# =================== OTHER ====================\n",
        "# Setup Timer\n",
        "if \"START_COLAB\" in os.environ:\n",
        "    start_colab = int(os.environ[\"START_COLAB\"])\n",
        "else:\n",
        "    start_colab = int(time.time()) - 5\n",
        "    os.environ[\"START_COLAB\"] = str(start_colab)\n",
        "\n",
        "# remove directory func\n",
        "def _remove_dir(directory_path):\n",
        "    if directory_path and os.path.exists(directory_path):\n",
        "        try:\n",
        "            shutil.rmtree(directory_path)\n",
        "        except Exception:\n",
        "            !rm -rf {directory_path}\n",
        "\n",
        "# Save files temporarily\n",
        "temporarily_dir = f'{root_path}/temp_dir'\n",
        "\n",
        "def copy_items_with_replace(src_base, dst_base):\n",
        "    items_to_copy = [\n",
        "        'embeddings',\n",
        "        'models/Stable-diffusion',\n",
        "        'models/VAE',\n",
        "        'models/Lora',\n",
        "        'models/ControlNet'\n",
        "    ]\n",
        "\n",
        "    print(\"Moving files...\", end='')\n",
        "    time.sleep(1)\n",
        "    for item in items_to_copy:\n",
        "        src = os.path.join(src_base, item)\n",
        "        dst = os.path.join(dst_base, item)\n",
        "\n",
        "        if os.path.exists(src):\n",
        "            if os.path.exists(dst):\n",
        "                _remove_dir(dst)\n",
        "            os.makedirs(os.path.dirname(dst), exist_ok=True)\n",
        "            shutil.move(src, dst)\n",
        "    print(\"\\r🔥 Files moved!\" + \" \"*15)\n",
        "\n",
        "def download_and_unpack(url, dest_path):\n",
        "    aria2_args = \"--console-log-level=error -c -x 16 -s 16 -k 1M\"\n",
        "    !aria2c {aria2_args} '{url}' -o repo.zip\n",
        "    !unzip -q -o repo.zip -d {dest_path}\n",
        "    !rm -rf repo.zip\n",
        "\n",
        "def handle_colab_timer(webui_path, timer_colab):\n",
        "    timer_file_path = os.path.join(webui_path, 'static', 'colabTimer.txt')\n",
        "    if not os.path.exists(timer_file_path):\n",
        "        with open(timer_file_path, 'w') as timer_file:\n",
        "            timer_file.write(str(timer_colab))\n",
        "    else:\n",
        "        with open(timer_file_path, 'r') as timer_file:\n",
        "            timer_colab = float(timer_file.read())\n",
        "    return timer_colab\n",
        "\n",
        "def unpack_webui():\n",
        "    start_install = time.time()\n",
        "    print(f\"⌚ Unpacking Stable Diffusion{' (Forge)' if UI == 'Forge' else ''}...\", end='')\n",
        "\n",
        "    with capture.capture_output() as cap:\n",
        "        download_url = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip\"\n",
        "        if UI == 'Forge':\n",
        "            download_url = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip\"\n",
        "\n",
        "        download_and_unpack(download_url, webui_path)\n",
        "\n",
        "        get_ipython().system(f'echo -n {start_colab} > {webui_path}/static/colabTimer.txt')\n",
        "    del cap\n",
        "\n",
        "    install_time = time.time() - start_install\n",
        "    minutes, seconds = divmod(int(install_time), 60)\n",
        "    print(f\"\\r🚀 Unpacking complete! For {minutes:02}:{seconds:02} ⚡\" + \" \"*15)\n",
        "\n",
        "    if os.path.exists(temporarily_dir):\n",
        "        copy_items_with_replace(temporarily_dir, webui_path)\n",
        "        _remove_dir(temporarily_dir)\n",
        "\n",
        "# ================= MAIN CODE ==================\n",
        "if os.path.exists(webui_path):\n",
        "    if UI != OLD_UI:\n",
        "        print(f'Switching the WebUI from \\033[33m{OLD_UI}\\033[0m to \\033[33m{UI}\\033[0m:\\n')\n",
        "        copy_items_with_replace(webui_path, temporarily_dir)\n",
        "        _remove_dir(webui_path)\n",
        "        os.environ['SDW_OLD_UI'] = UI\n",
        "        time.sleep(2)\n",
        "        clear_output()\n",
        "\n",
        "if not os.path.exists(webui_path):\n",
        "    unpack_webui()\n",
        "else:\n",
        "    print(\"🚀 All unpacked... Skip. ⚡\")\n",
        "    timer_colab = handle_colab_timer(webui_path, start_colab)\n",
        "    elapsed_time = str(timedelta(seconds=time.time() - timer_colab)).split('.')[0]\n",
        "    print(f\"⌚️ You have been conducting this session for - \\033[33m{elapsed_time}\\033[0m\")\n",
        "\n",
        "\n",
        "## Changes extensions and WebUi\n",
        "if latest_webui or latest_exstensions:\n",
        "    action = \"WebUI and Extensions\" if latest_webui and latest_exstensions else (\"WebUI\" if latest_webui else \"Extensions\")\n",
        "    print(f\"⌚️ Updating {action}...\", end='')\n",
        "    with capture.capture_output() as cap:\n",
        "        !git config --global user.email \"[email protected]\"\n",
        "        !git config --global user.name \"Your Name\"\n",
        "\n",
        "        ## Update Webui\n",
        "        if latest_webui:\n",
        "            %cd {webui_path}\n",
        "            !git restore .\n",
        "            !git pull -X theirs --rebase --autostash\n",
        "\n",
        "        ## Update extensions\n",
        "        if latest_exstensions:\n",
        "            !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n",
        "    del cap\n",
        "    print(f\"\\r✨ Updating {action} Completed!\")\n",
        "\n",
        "\n",
        "# === FIXING EXTENSIONS ===\n",
        "anxety_repos = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main\"\n",
        "\n",
        "with capture.capture_output() as cap:\n",
        "    # --- Umi-Wildcard ---\n",
        "    !sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py  # Closed accordion by default\n",
        "    # --- Encrypt-Image ---\n",
        "    !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui\n",
        "    # --- Additional-Networks ---\n",
        "    !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py  # Fixing an error due to old style\n",
        "del cap\n",
        "\n",
        "\n",
        "## Version switching\n",
        "if commit_hash:\n",
        "    print('⏳ Time machine activation...', end=\"\")\n",
        "    with capture.capture_output() as cap:\n",
        "        %cd {webui_path}\n",
        "        !git config --global user.email \"[email protected]\"\n",
        "        !git config --global user.name \"Your Name\"\n",
        "        !git reset --hard {commit_hash}\n",
        "    del cap\n",
        "    print(f\"\\r⌛️ The time machine has been activated! Current commit: \\033[34m{commit_hash}\\033[0m\")\n",
        "\n",
        "\n",
        "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n",
        "print(\"📦 Downloading models and stuff...\", end='')\n",
        "\n",
        "url = \"\"\n",
        "PREFIXES = {\n",
        "    \"model\": models_dir,\n",
        "    \"vae\": vaes_dir,\n",
        "    \"lora\": loras_dir,\n",
        "    \"embed\": embeddings_dir,\n",
        "    \"extension\": extensions_dir,\n",
        "    \"control\": control_dir,\n",
        "    \"adetailer\": adetailer_dir,\n",
        "    \"config\": webui_path\n",
        "}\n",
        "\n",
        "extension_repo = []\n",
        "directories = [value for key, value in PREFIXES.items()] # for unpucking zip files\n",
        "!mkdir -p {\" \".join(directories)}\n",
        "\n",
        "hf_token = huggingface_token if huggingface_token else \"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\"\n",
        "user_header = f\"\\\"Authorization: Bearer {hf_token}\\\"\"\n",
        "\n",
        "''' Formatted Info Output '''\n",
        "\n",
        "from math import floor\n",
        "\n",
        "def center_text(text, terminal_width=45):\n",
        "    padding = (terminal_width - len(text)) // 2\n",
        "    return f\"\\033[1m\\033[36m{' ' * padding}{text}{' ' * padding}\\033[0m\\033[32m\"\n",
        "\n",
        "def format_output(url, dst_dir, file_name, image_name=None, image_url=None):\n",
        "    info = center_text(f\"[{file_name.split('.')[0]}]\")\n",
        "    separation_line = '\\033[32m' + '---' * 20\n",
        "\n",
        "    print(f\"\\n{separation_line}{info}{separation_line}\")\n",
        "    print(f\"\\033[33mURL: \\033[34m{url}\")\n",
        "    print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n",
        "    print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
        "\n",
        "    if 'civitai' in url and image_url:\n",
        "        print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n",
        "\n",
        "''' GET CivitAi API - DATA '''\n",
        "\n",
        "def CivitAi_API(url, file_name=None):\n",
        "    SUPPORT_TYPES = ('Checkpoint', 'TextualInversion', 'LORA')\n",
        "    CIVITAI_TOKEN = \"62c0c5956b2f9defbd844d754000180b\"\n",
        "\n",
        "    url = url.split('?token=')[0] if '?token=' in url else url\n",
        "    url = url.replace('?type=', f'?token={CIVITAI_TOKEN}&type=') if '?type=' in url else f\"{url}?token={CIVITAI_TOKEN}\"\n",
        "\n",
        "    def get_model_data(url):\n",
        "        base_url = \"https://civitai.com/api/v1\"\n",
        "        try:\n",
        "            if \"civitai.com/models/\" in url:\n",
        "                if '?modelVersionId=' in url:\n",
        "                    version_id = url.split('?modelVersionId=')[1]\n",
        "                else:\n",
        "                    model_id = url.split('/models/')[1].split('/')[0]\n",
        "                    model_data = requests.get(f\"{base_url}/models/{model_id}\").json()\n",
        "                    version_id = model_data['modelVersions'][0].get('id')\n",
        "            else:\n",
        "                version_id = url.split('/models/')[1].split('/')[0]\n",
        "\n",
        "            return requests.get(f\"{base_url}/model-versions/{version_id}\").json()\n",
        "        except (KeyError, IndexError, requests.RequestException) as e:\n",
        "            return None\n",
        "\n",
        "    data = get_model_data(url)\n",
        "\n",
        "    if not data:\n",
        "        print(\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\")\n",
        "        return 'None', None, None, None, None, None, None\n",
        "\n",
        "    def extract_model_info(url, data):\n",
        "        model_type = data['model']['type']\n",
        "        model_name = data['files'][0]['name']\n",
        "\n",
        "        if 'type=' in url:\n",
        "            url_model_type = parse_qs(urlparse(url).query).get('type', [''])[0].lower()\n",
        "            if 'vae' in url_model_type:\n",
        "                model_type = data['files'][1]['type']\n",
        "                model_name = data['files'][1]['name']\n",
        "\n",
        "        return model_type, model_name\n",
        "\n",
        "    model_type, model_name = extract_model_info(url, data)\n",
        "    model_name = file_name or model_name\n",
        "\n",
        "    def get_download_url(data, model_type):\n",
        "        if any(t.lower() in model_type.lower() for t in SUPPORT_TYPES):\n",
        "            return data['files'][0]['downloadUrl']\n",
        "\n",
        "        return data['files'][1]['downloadUrl'] if 'type' in url else data['files'][0]['downloadUrl']\n",
        "\n",
        "    download_url = get_download_url(data, model_type)\n",
        "\n",
        "    def get_image_info(data, model_type, model_name):\n",
        "        if not any(t in model_type for t in SUPPORT_TYPES):\n",
        "            return None, None\n",
        "\n",
        "        for image in data.get('images', []):\n",
        "            if image['nsfwLevel'] >= 4 and env == 'Kaggle':  # Filter NSFW images for Kaggle\n",
        "                continue\n",
        "            image_url = image['url']\n",
        "            image_extension = image_url.split('.')[-1]\n",
        "            image_name = f\"{model_name.split('.')[0]}.preview.{image_extension}\" if image_url else None\n",
        "            return image_url, image_name\n",
        "\n",
        "        return None, None\n",
        "\n",
        "    image_url, image_name = get_image_info(data, model_type, model_name)\n",
        "\n",
        "    return f\"{download_url}{'&' if '?' in download_url else '?'}token={CIVITAI_TOKEN}\", download_url, model_type, model_name, image_url, image_name, data\n",
        "\n",
        "''' Main Download Code '''\n",
        "\n",
        "def strip_(url):\n",
        "    if 'github.com' in url:\n",
        "        return url.replace('/blob/', '/raw/')\n",
        "    elif \"huggingface.co\" in url:\n",
        "        url = url.replace('/blob/', '/resolve/')\n",
        "        return url.split('?')[0] if '?' in url else url\n",
        "    return url\n",
        "\n",
        "def download(url):\n",
        "    links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n",
        "\n",
        "    for link_or_path in links_and_paths:\n",
        "        if any(link_or_path.lower().startswith(prefix) for prefix in PREFIXES):\n",
        "            handle_manual(link_or_path)\n",
        "        else:\n",
        "            url, dst_dir, file_name = link_or_path.split()\n",
        "            manual_download(url, dst_dir, file_name)\n",
        "\n",
        "    # Unpuck ZIPs Files\n",
        "    for directory in directories:\n",
        "        for root, _, files in os.walk(directory):\n",
        "            for file in files:\n",
        "                if file.endswith(\".zip\"):\n",
        "                    zip_path = os.path.join(root, file)\n",
        "                    extract_path = os.path.splitext(zip_path)[0]\n",
        "                    with zipfile.ZipFile(zip_path, 'r') as zip_ref:\n",
        "                        zip_ref.extractall(extract_path)\n",
        "                    os.remove(zip_path)\n",
        "\n",
        "def handle_manual(url):\n",
        "    url_parts = url.split(':', 1)\n",
        "    prefix, path = url_parts[0], url_parts[1]\n",
        "\n",
        "    file_name_match = re.search(r'\\[(.*?)\\]', path)\n",
        "    file_name = file_name_match.group(1) if file_name_match else None\n",
        "    if file_name:\n",
        "        path = re.sub(r'\\[.*?\\]', '', path)\n",
        "\n",
        "    if prefix in PREFIXES:\n",
        "        dir = PREFIXES[prefix]\n",
        "        if prefix != \"extension\":\n",
        "            try:\n",
        "                manual_download(path, dir, file_name=file_name, prefix=prefix)\n",
        "            except Exception as e:\n",
        "                print(f\"Error downloading file: {e}\")\n",
        "        else:\n",
        "            extension_repo.append((path, file_name))\n",
        "\n",
        "def manual_download(url, dst_dir, file_name, prefix=None):\n",
        "    header_option = f\"--header={user_header}\"\n",
        "    aria2c_header = \"--header='User-Agent: Mozilla/5.0' --allow-overwrite=true\"\n",
        "    aria2_args = \"--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5\"\n",
        "\n",
        "    clean_url = strip_(url)\n",
        "\n",
        "    if 'civitai' in url:\n",
        "        url, clean_url, model_type, file_name, image_url, image_name, data = CivitAi_API(url, file_name)\n",
        "        if image_url and image_name:\n",
        "            command = [\"aria2c\"] + aria2_args.split() + [\"-d\", dst_dir, \"-o\", image_name, image_url]\n",
        "            subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)\n",
        "\n",
        "    elif 'github' in url or \"huggingface.co\" in url:\n",
        "        basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
        "\n",
        "    \"\"\" Formatted info output \"\"\"\n",
        "    model_name_or_basename = file_name if file_name else basename\n",
        "    try:\n",
        "        format_output(clean_url or url, dst_dir, model_name_or_basename, image_name, image_url)\n",
        "    except UnboundLocalError:\n",
        "        format_output(clean_url or url, dst_dir, model_name_or_basename, None, None)\n",
        "\n",
        "    # =====================\n",
        "    def run_aria2c(url, dst_dir, file_name=None, args=\"\", header=\"\"):\n",
        "        file_path = os.path.join(dst_dir, file_name) # replaces config files\n",
        "        if os.path.exists(file_path) and prefix == 'config':\n",
        "            os.remove(file_path)\n",
        "\n",
        "        out = f\"-o '{file_name}'\" if file_name else \"\"\n",
        "        !aria2c {header} {args} -d {dst_dir} {out} '{url}'\n",
        "\n",
        "    # -- Google Drive --\n",
        "    if 'drive.google' in url:\n",
        "        if not globals().get('have_drive_link', False):\n",
        "            os.system(\"pip install -U gdown > /dev/null\")\n",
        "            globals()['have_drive_link'] = True\n",
        "\n",
        "        if 'folders' in url:\n",
        "            os.system(f\"gdown --folder \\\"{url}\\\" -O {dst_dir} --fuzzy -c\")\n",
        "        else:\n",
        "            out_path = f\"{dst_dir}/{file_name}\" if file_name else dst_dir\n",
        "            os.system(f\"gdown \\\"{url}\\\" -O {out_path} --fuzzy -c\")\n",
        "\n",
        "    # -- GitHub or Hugging Face --\n",
        "    elif 'github' in url or 'huggingface' in url:\n",
        "        run_aria2c(clean_url, dst_dir, basename, aria2_args, header_option if 'huggingface' in url else '')\n",
        "\n",
        "    # -- Other HTTP/Sources --\n",
        "    elif 'http' in url:\n",
        "        run_aria2c(url, dst_dir, file_name, aria2_args, aria2c_header)\n",
        "\n",
        "''' SubModels - Added URLs '''\n",
        "\n",
        "def add_submodels(selection, num_selection, model_dict, dst_dir):\n",
        "    if selection == \"none\":\n",
        "        return []\n",
        "    if selection == \"ALL\":\n",
        "        all_models = []\n",
        "        for models in model_dict.values():\n",
        "            all_models.extend(models)\n",
        "        selected_models = all_models\n",
        "    else:\n",
        "        selected_models = model_dict[selection]\n",
        "        selected_nums = map(int, num_selection.replace(',', '').split())\n",
        "        for num in selected_nums:\n",
        "            if 1 <= num <= len(model_dict):\n",
        "                name = list(model_dict)[num - 1]\n",
        "                selected_models.extend(model_dict[name])\n",
        "\n",
        "    unique_models = list({model['name']: model for model in selected_models}.values())\n",
        "    for model in unique_models:\n",
        "        model['dst_dir'] = dst_dir\n",
        "\n",
        "    return unique_models\n",
        "\n",
        "def handle_submodels(selection, num_selection, model_dict, dst_dir, url):\n",
        "    submodels = add_submodels(selection, num_selection, model_dict, dst_dir)\n",
        "    for submodel in submodels:\n",
        "        if not inpainting_model and \"inpainting\" in submodel['name']:\n",
        "            continue\n",
        "        url += f\"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, \"\n",
        "    return url\n",
        "\n",
        "url = handle_submodels(model, model_num, model_list, models_dir, url)\n",
        "url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)\n",
        "url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)\n",
        "\n",
        "''' file.txt - added urls '''\n",
        "\n",
        "def process_file_download(file_url, PREFIXES, unique_urls):\n",
        "    files_urls = \"\"\n",
        "\n",
        "    if file_url.startswith(\"http\"):\n",
        "        if \"blob\" in file_url:\n",
        "            file_url = file_url.replace(\"blob\", \"raw\")\n",
        "        response = requests.get(file_url)\n",
        "        lines = response.text.split('\\n')\n",
        "    else:\n",
        "        with open(file_url, 'r') as file:\n",
        "            lines = file.readlines()\n",
        "\n",
        "    current_tag = None\n",
        "    for line in lines:\n",
        "        line = line.strip()\n",
        "        if any(f'# {tag}' in line.lower() for tag in PREFIXES):\n",
        "            current_tag = next((tag for tag in PREFIXES if tag in line.lower()))\n",
        "\n",
        "        urls = [url.split('#')[0].strip() for url in line.split(',')]  # filter urls\n",
        "        for url in urls:\n",
        "            filter_url = url.split('[')[0] # same url filter\n",
        "\n",
        "            if url.startswith(\"http\") and filter_url not in unique_urls:\n",
        "                files_urls += f\"{current_tag}:{url}, \"\n",
        "                unique_urls.add(filter_url)\n",
        "\n",
        "    return files_urls\n",
        "\n",
        "file_urls = \"\"\n",
        "unique_urls = set()\n",
        "\n",
        "if custom_file_urls:\n",
        "    for custom_file_url in custom_file_urls.replace(',', '').split():\n",
        "        if not custom_file_url.endswith('.txt'):\n",
        "            custom_file_url += '.txt'\n",
        "        if not custom_file_url.startswith('http'):\n",
        "            if not custom_file_url.startswith(root_path):\n",
        "                custom_file_url = f'{root_path}/{custom_file_url}'\n",
        "\n",
        "        try:\n",
        "            file_urls += process_file_download(custom_file_url, PREFIXES, unique_urls)\n",
        "        except FileNotFoundError:\n",
        "            pass\n",
        "\n",
        "# url prefixing\n",
        "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n",
        "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(PREFIXES.keys(), urls) if url for url in url.replace(',', '').split())\n",
        "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
        "\n",
        "if detailed_download == \"on\":\n",
        "    print(\"\\n\\n\\033[33m# ====== Detailed Download ====== #\\n\\033[0m\")\n",
        "    download(url)\n",
        "    print(\"\\n\\033[33m# =============================== #\\n\\033[0m\")\n",
        "else:\n",
        "    with capture.capture_output() as cap:\n",
        "        download(url)\n",
        "    del cap\n",
        "\n",
        "print(\"\\r🏁 Download Complete!\" + \" \"*15)\n",
        "\n",
        "\n",
        "# Cleaning shit after downloading...\n",
        "!find {webui_path} \\( -type d \\( -name \".ipynb_checkpoints\" -o -name \".aria2\" \\) -o -type f -name \"*.aria2\" \\) -exec rm -r {{}} \\; >/dev/null 2>&1\n",
        "\n",
        "\n",
        "## Install of Custom extensions\n",
        "if len(extension_repo) > 0:\n",
        "    print(\"✨ Installing custom extensions...\", end='')\n",
        "    with capture.capture_output() as cap:\n",
        "        for repo, repo_name in extension_repo:\n",
        "            if not repo_name:\n",
        "                repo_name = repo.split('/')[-1]\n",
        "            !cd {extensions_dir} \\\n",
        "                && git clone {repo} {repo_name} \\\n",
        "                && cd {repo_name} \\\n",
        "                && git fetch\n",
        "    del cap\n",
        "    print(f\"\\r📦 Installed '{len(extension_repo)}', Custom extensions!\")\n",
        "\n",
        "\n",
        "## List Models and stuff V2\n",
        "if detailed_download == \"off\":\n",
        "    print(\"\\n\\n\\033[33mIf you don't see any downloaded files, enable the 'Detailed Downloads' feature in the widget.\")\n",
        "\n",
        "%run {root_path}/file_cell/special/dl_display_results.py # display widgets result"
      ]
    }
  ],
  "metadata": {
    "colab": {
      "provenance": []
    },
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}