{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": { "id": "2lJmbqrs3Mu8" }, "outputs": [], "source": [ "##~ DOWNLOADING CODE | BY: ANXETY ~##\n", "\n", "from directory_setup import *\n", "from models_data import model_list, vae_list, controlnet_list\n", "\n", "import os\n", "import re\n", "import time\n", "import json\n", "import shutil\n", "import zipfile\n", "import requests\n", "import subprocess\n", "from datetime import timedelta\n", "from subprocess import getoutput\n", "from IPython.utils import capture\n", "from IPython.display import clear_output\n", "from urllib.parse import urlparse, parse_qs\n", "\n", "\n", "# Setup Env\n", "env = os.getenv('ENV_NAME')\n", "root_path = os.getenv('ROOT_PATH')\n", "webui_path = os.getenv('WEBUI_PATH')\n", "free_plan = os.getenv('FREE_PLAN')\n", "\n", "UI = os.getenv('SDW_UI')\n", "OLD_UI = os.getenv('SDW_OLD_UI')\n", "\n", "os.chdir(root_path)\n", "\n", "\n", "# ============ loading settings V4 =============\n", "def load_settings(path):\n", " if os.path.exists(path):\n", " with open(path, 'r') as file:\n", " return json.load(file)\n", " return {}\n", "\n", "settings = load_settings(f'{root_path}/settings.json')\n", "\n", "VARIABLES = [\n", " 'model', 'model_num', 'inpainting_model',\n", " 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',\n", " 'change_webui', 'detailed_download', 'controlnet',\n", " 'controlnet_num', 'commit_hash', 'huggingface_token',\n", " 'ngrok_token', 'zrok_token', 'commandline_arguments',\n", " 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',\n", " 'Extensions_url', 'custom_file_urls'\n", "]\n", "\n", "locals().update({key: settings.get(key) for key in VARIABLES})\n", "\n", "\n", "# ================ LIBRARIES V2 ================\n", "flag_file = f\"{root_path}/libraries_installed.txt\"\n", "\n", "if not os.path.exists(flag_file):\n", " print(\"💿 Установка библиотек, это займет какое-то время:\\n\")\n", "\n", " install_lib = {\n", " # \"aria2\": \"apt -y install 
aria2\",\n", " \"aria2\": \"pip install aria2\",\n", " \"localtunnel\": \"npm install -g localtunnel\",\n", " }\n", " if controlnet != 'none':\n", " install_lib[\"insightface\"] = \"pip install insightface\"\n", "\n", " additional_libs = {\n", " \"Google Colab\": {\n", " \"xformers\": \"pip install xformers==0.0.27 --no-deps\"\n", " },\n", " \"Kaggle\": {\n", " \"xformers\": \"pip install xformers==0.0.26.post1\",\n", " # \"torch\": \"pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\",\n", " # \"aiohttp\": \"pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*\" # fix install req\n", " }\n", " }\n", " if env in additional_libs:\n", " install_lib.update(additional_libs[env])\n", "\n", " # Loop through libraries\n", " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n", " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n", " subprocess.run(install_cmd, shell=True, capture_output=True)\n", "\n", " # Additional specific packages\n", " with capture.capture_output():\n", " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n", " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n", " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.32/zrok_0.4.32_linux_amd64.tar.gz && tar -xzf zrok_0.4.32_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.32_linux_amd64.tar.gz\n", "\n", " clear_output()\n", "\n", " # Save file install lib\n", " with open(flag_file, \"w\") as f:\n", " f.write(\">W<'\")\n", "\n", " print(\"🍪 Библиотеки установлены!\" + \" \"*35)\n", " time.sleep(2)\n", " clear_output()\n", "\n", "\n", "# =================== OTHER ====================\n", "# Setup Timer\n", "start_colab = 
int(os.environ.get(\"START_COLAB\", time.time() - 5))\n", "os.environ[\"START_COLAB\"] = str(start_colab)\n", "\n", "def download_cfg_files(file_paths, destination_path):\n", "    base_url = \"https://huggingface.co/NagisaNao/SD-CONFIGS/resolve/main\"\n", "    for filename in file_paths:\n", "        file_name = filename.split('/')[-1]\n", "        !wget -O {destination_path}/{file_name} {base_url}/{filename}\n", "\n", "def cfg_download():\n", "    common_files = [\"styles.csv\"]\n", "    a1111_files = [\"A1111/config.json\", \"A1111/ui-config.json\"]\n", "    forge_files = [\"reForge/config.json\", \"reForge/ui-config.json\"]\n", "\n", "    with capture.capture_output():\n", "        download_cfg_files(common_files, webui_path)\n", "        ui_files = a1111_files if UI == 'A1111' else forge_files\n", "        download_cfg_files(ui_files, webui_path)\n", "\n", "def remove_dir(directory_path):\n", "    if directory_path and os.path.exists(directory_path):\n", "        try:\n", "            shutil.rmtree(directory_path)\n", "        except Exception:\n", "            !rm -rf {directory_path}\n", "\n", "TEMPORARY_DIR = f'{root_path}/temp_dir'\n", "def copy_items_with_replace(src_base, dst_base):\n", "    items_to_copy = [\n", "        'embeddings',\n", "        'models/Stable-diffusion',\n", "        'models/VAE',\n", "        'models/Lora',\n", "        'models/ControlNet'\n", "    ]\n", "\n", "    print(\"⌚ Перемещение файлов...\", end='')\n", "    for item in items_to_copy:\n", "        src = os.path.join(src_base, item)\n", "        dst = os.path.join(dst_base, item)\n", "\n", "        if os.path.exists(src):\n", "            if os.path.exists(dst):\n", "                remove_dir(dst)\n", "            os.makedirs(os.path.dirname(dst), exist_ok=True)\n", "            shutil.move(src, dst)\n", "    print(\"\\r🔥 Файлы перемещены!\" + \" \"*15)\n", "\n", "def handle_colab_timer(webui_path, timer_colab):\n", "    timer_file_path = os.path.join(webui_path, 'static', 'colabTimer.txt')\n", "    if not os.path.exists(timer_file_path):\n", "        with open(timer_file_path, 'w') as timer_file:\n", "            timer_file.write(str(timer_colab))\n", "    else:\n", "        with open(timer_file_path, 'r') as 
timer_file:\n", " timer_colab = float(timer_file.read())\n", " return timer_colab\n", "\n", "def unpack_webui():\n", " start_install = time.time()\n", " print(f\"⌚ Распаковка Stable Diffusion{' (Forge)' if UI == 'Forge' else ''}...\", end='')\n", "\n", " with capture.capture_output():\n", " download_url = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip\"\n", " if UI == 'Forge':\n", " download_url = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip\"\n", "\n", " zip_path = f\"{root_path}/repo.zip\"\n", " !aria2c --console-log-level=error -c -x 16 -s 16 -k 1M {download_url} -d {root_path} -o repo.zip\n", " !unzip -q -o {zip_path} -d {webui_path}\n", " !rm -rf {zip_path}\n", "\n", " !echo -n {start_colab} > {webui_path}/static/colabTimer.txt\n", "\n", " install_time = time.time() - start_install\n", " minutes, seconds = divmod(int(install_time), 60)\n", " print(f\"\\r🚀 Распаковка Завершена! За {minutes:02}:{seconds:02} ⚡\" + \" \"*15)\n", "\n", " if os.path.exists(TEMPORARY_DIR):\n", " copy_items_with_replace(TEMPORARY_DIR, webui_path)\n", " remove_dir(TEMPORARY_DIR)\n", "\n", "# ================= MAIN CODE ==================\n", "if os.path.exists(webui_path):\n", " if UI != OLD_UI:\n", " print(f'Переключение веб-интерфейса с \\033[33m{OLD_UI}\\033[0m на \\033[33m{UI}\\033[0m:')\n", " copy_items_with_replace(webui_path, TEMPORARY_DIR)\n", " remove_dir(webui_path)\n", " os.environ['SDW_OLD_UI'] = UI\n", " time.sleep(2)\n", " clear_output()\n", "\n", "if not os.path.exists(webui_path):\n", " unpack_webui()\n", " cfg_download()\n", "else:\n", " print(\"🚀 Все распакованно... Пропуск. 
⚡\")\n", " timer_colab = handle_colab_timer(webui_path, start_colab)\n", " elapsed_time = str(timedelta(seconds=time.time() - timer_colab)).split('.')[0]\n", " print(f\"⌚️ Вы проводите эту сессию в течение - \\033[33m{elapsed_time}\\033[0m\")\n", "\n", "\n", "## Changes extensions and WebUi\n", "if latest_webui or latest_exstensions:\n", " action = \"WebUI и Расширений\" if latest_webui and latest_exstensions else (\"WebUI\" if latest_webui else \"Расширений\")\n", " print(f\"⌚️ Обновление {action}...\", end='')\n", " with capture.capture_output():\n", " !git config --global user.email \"you@example.com\"\n", " !git config --global user.name \"Your Name\"\n", "\n", " ## Update Webui\n", " if latest_webui:\n", " %cd {webui_path}\n", " !git restore .\n", " !git pull -X theirs --rebase --autostash\n", "\n", " ## Update extensions\n", " if latest_exstensions:\n", " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n", " print(f\"\\r✨ Обновление {action} Завершено!\")\n", "\n", "\n", "# === FIXING EXTENSIONS ===\n", "anxety_repos = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main\"\n", "with capture.capture_output():\n", " # --- Umi-Wildcard ---\n", " !sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default\n", " # --- Encrypt-Image ---\n", " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui\n", "\n", "\n", "## Version switching\n", "if commit_hash:\n", " print('⏳ Активация машины времени...', end=\"\")\n", " with capture.capture_output():\n", " %cd {webui_path}\n", " !git config --global user.email \"you@example.com\"\n", " !git config --global user.name \"Your Name\"\n", " !git reset --hard {commit_hash}\n", " print(f\"\\r⌛️ Машина времени активированна! 
Текущий коммит: \\033[34m{commit_hash}\\033[0m\")\n", "\n", "\n", "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n", "print(\"📦 Скачивание моделей и прочего...\", end='')\n", "\n", "extension_repo = []\n", "PREFIXES = {\n", " \"model\": models_dir,\n", " \"vae\": vaes_dir,\n", " \"lora\": loras_dir,\n", " \"embed\": embeddings_dir,\n", " \"extension\": extensions_dir,\n", " \"control\": control_dir,\n", " \"adetailer\": adetailer_dir,\n", " \"config\": webui_path\n", "}\n", "!mkdir -p {\" \".join(PREFIXES.values())}\n", "\n", "''' Formatted Info Output '''\n", "\n", "def center_text(text, terminal_width=45):\n", " padding = (terminal_width - len(text)) // 2\n", " return f\"{' ' * padding}{text}{' ' * padding}\"\n", "\n", "def format_output(url, dst_dir, file_name, image_name=None, image_url=None):\n", " info = center_text(f\"[{file_name.split('.')[0]}]\")\n", " sep_line = '---' * 20\n", "\n", " print(f\"\\n\\033[32m{sep_line}\\033[36;1m{info}\\033[32m{sep_line}\\033[0m\")\n", " print(f\"\\033[33mURL: {url}\")\n", " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n", " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n", " if 'civitai' in url and image_url:\n", " print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n", "\n", "''' GET CivitAi API - DATA '''\n", "\n", "def CivitAi_API(url, file_name=None):\n", " SUPPORT_TYPES = ('Checkpoint', 'TextualInversion', 'LORA')\n", " CIVITAI_TOKEN = \"62c0c5956b2f9defbd844d754000180b\"\n", "\n", " url = url.split('?token=')[0] if '?token=' in url else url\n", " url = url.replace('?type=', f'?token={CIVITAI_TOKEN}&type=') if '?type=' in url else f\"{url}?token={CIVITAI_TOKEN}\"\n", "\n", " def get_model_data(url):\n", " base_url = \"https://civitai.com/api/v1\"\n", " try:\n", " if \"civitai.com/models/\" in url:\n", " if '?modelVersionId=' in url:\n", " version_id = url.split('?modelVersionId=')[1]\n", " else:\n", " model_id = 
url.split('/models/')[1].split('/')[0]\n", " model_data = requests.get(f\"{base_url}/models/{model_id}\").json()\n", " version_id = model_data['modelVersions'][0].get('id')\n", " else:\n", " version_id = url.split('/models/')[1].split('/')[0]\n", "\n", " return requests.get(f\"{base_url}/model-versions/{version_id}\").json()\n", " except (KeyError, IndexError, requests.RequestException) as e:\n", " return None\n", "\n", " data = get_model_data(url)\n", "\n", " if not data:\n", " print(\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\")\n", " return 'None', None, None, None, None, None, None\n", "\n", " def get_model_info(url, data):\n", " model_type = data['model']['type']\n", " model_name = data['files'][0]['name']\n", "\n", " if 'type=' in url:\n", " url_model_type = parse_qs(urlparse(url).query).get('type', [''])[0].lower()\n", " if 'vae' in url_model_type:\n", " model_type = data['files'][1]['type']\n", " model_name = data['files'][1]['name']\n", "\n", " if file_name and '.' 
not in file_name:\n", " file_extension = model_name.split('.')[-1]\n", " model_name = f\"{file_name}.{file_extension}\"\n", " elif file_name:\n", " model_name = file_name\n", "\n", " return model_type, model_name\n", "\n", " def get_download_url(data, model_type):\n", " if any(t.lower() in model_type.lower() for t in SUPPORT_TYPES):\n", " return data['files'][0]['downloadUrl']\n", "\n", " return data['files'][1]['downloadUrl'] if 'type' in url else data['files'][0]['downloadUrl']\n", "\n", " def get_image_info(data, model_type, model_name):\n", " if not any(t in model_type for t in SUPPORT_TYPES):\n", " return None, None\n", "\n", " for image in data.get('images', []):\n", " if image['nsfwLevel'] >= 4 and env == 'Kaggle': # Filter NSFW images for Kaggle\n", " continue\n", " image_url = image['url']\n", " image_extension = image_url.split('.')[-1]\n", " image_name = f\"{model_name.split('.')[0]}.preview.{image_extension}\" if image_url else None\n", " return image_url, image_name\n", " return None, None\n", "\n", " model_type, model_name = get_model_info(url, data)\n", " download_url = get_download_url(data, model_type)\n", " image_url, image_name = get_image_info(data, model_type, model_name)\n", "\n", " return f\"{download_url}{'&' if '?' in download_url else '?'}token={CIVITAI_TOKEN}\", download_url, model_type, model_name, image_url, image_name, data\n", "\n", "''' Main Download Code '''\n", "\n", "def strip_(url):\n", " if 'github.com' in url:\n", " return url.replace('/blob/', '/raw/')\n", " elif \"huggingface.co\" in url:\n", " url = url.replace('/blob/', '/resolve/')\n", " return url.split('?')[0] if '?' 
in url else url\n", " return url\n", "\n", "def download(url):\n", " links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n", "\n", " for link_or_path in links_and_paths:\n", " if any(link_or_path.lower().startswith(prefix) for prefix in PREFIXES):\n", " handle_manual(link_or_path)\n", " else:\n", " url, dst_dir, file_name = link_or_path.split()\n", " manual_download(url, dst_dir, file_name)\n", "\n", " # Unpuck ZIPs Files\n", " for directory in PREFIXES.values():\n", " for root, _, files in os.walk(directory):\n", " for file in files:\n", " if file.endswith(\".zip\"):\n", " zip_path = os.path.join(root, file)\n", " extract_path = os.path.splitext(zip_path)[0]\n", " with zipfile.ZipFile(zip_path, 'r') as zip_ref:\n", " zip_ref.extractall(extract_path)\n", " os.remove(zip_path)\n", "\n", "def handle_manual(url):\n", " url_parts = url.split(':', 1)\n", " prefix, path = url_parts[0], url_parts[1]\n", "\n", " file_name_match = re.search(r'\\[(.*?)\\]', path)\n", " file_name = file_name_match.group(1) if file_name_match else None\n", " if file_name:\n", " path = re.sub(r'\\[.*?\\]', '', path)\n", "\n", " if prefix in PREFIXES:\n", " dir = PREFIXES[prefix]\n", " if prefix != \"extension\":\n", " try:\n", " manual_download(path, dir, file_name=file_name, prefix=prefix)\n", " except Exception as e:\n", " print(f\"Error downloading file: {e}\")\n", " else:\n", " extension_repo.append((path, file_name))\n", "\n", "def manual_download(url, dst_dir, file_name, prefix=None):\n", " hf_header = f\"--header='Authorization: Bearer {huggingface_token}'\" if huggingface_token else \"\"\n", " aria2c_header = \"--header='User-Agent: Mozilla/5.0' --allow-overwrite=true\"\n", " aria2_args = \"--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5\"\n", "\n", " clean_url = strip_(url)\n", "\n", " if 'civitai' in url:\n", " url, clean_url, model_type, file_name, image_url, 
image_name, data = CivitAi_API(url, file_name)\n", " if image_url and image_name:\n", " command = [\"aria2c\"] + aria2_args.split() + [\"-d\", dst_dir, \"-o\", image_name, image_url]\n", " subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)\n", "\n", " elif 'github' in url or 'huggingface.co' in url:\n", " if file_name and '.' not in file_name:\n", " file_extension = f\"{clean_url.split('/')[-1].split('.', 1)[1]}\"\n", " file_name = f\"{file_name}.{file_extension}\"\n", " if not file_name:\n", " file_name = clean_url.split(\"/\")[-1]\n", "\n", " \"\"\" Formatted info output \"\"\"\n", " try:\n", " format_output(clean_url, dst_dir, file_name, image_name, image_url)\n", " except UnboundLocalError:\n", " format_output(clean_url, dst_dir, file_name, None, None)\n", "\n", " # =====================\n", " def run_aria2c(url, dst_dir, file_name=None, args=\"\", header=\"\"):\n", " file_path = os.path.join(dst_dir, file_name) # replaces config files\n", " if os.path.exists(file_path) and prefix == 'config':\n", " os.remove(file_path)\n", "\n", " out = f\"-o '{file_name}'\" if file_name else \"\"\n", " !aria2c {header} {args} -d {dst_dir} {out} '{url}'\n", "\n", " # -- Google Drive --\n", " if 'drive.google' in url:\n", " if not globals().get('have_drive_link', False):\n", " os.system(\"pip install -U gdown > /dev/null\")\n", " globals()['have_drive_link'] = True\n", "\n", " if 'folders' in url:\n", " os.system(f\"gdown --folder \\\"{url}\\\" -O {dst_dir} --fuzzy -c\")\n", " else:\n", " out_path = f\"{dst_dir}/{file_name}\" if file_name else dst_dir\n", " os.system(f\"gdown \\\"{url}\\\" -O {out_path} --fuzzy -c\")\n", "\n", " # -- GitHub or Hugging Face --\n", " elif 'github' in url or 'huggingface' in url:\n", " run_aria2c(clean_url, dst_dir, file_name, aria2_args, hf_header if 'huggingface' in url else '')\n", "\n", " # -- Other HTTP/Sources --\n", " elif 'http' in url:\n", " run_aria2c(url, dst_dir, file_name, aria2_args, aria2c_header)\n", 
"\n", "''' SubModels - Added URLs '''\n", "\n", "# Separation of merged numbers\n", "def split_numbers(num_str, max_num):\n", " result = []\n", " i = 0\n", " while i < len(num_str):\n", " found = False\n", " for length in range(2, 0, -1):\n", " if i + length <= len(num_str):\n", " part = int(num_str[i:i + length])\n", " if part <= max_num:\n", " result.append(part)\n", " i += length\n", " found = True\n", " break\n", " if not found:\n", " break\n", " return result\n", "\n", "def add_submodels(selection, num_selection, model_dict, dst_dir):\n", " if selection == \"none\":\n", " return []\n", " selected_models = []\n", "\n", " if selection == \"ALL\":\n", " selected_models = sum(model_dict.values(), [])\n", " else:\n", " if selection in model_dict:\n", " selected_models.extend(model_dict[selection])\n", "\n", " nums = num_selection.replace(',', ' ').split()\n", " max_num = len(model_dict)\n", " unique_nums = set()\n", "\n", " for num_part in nums:\n", " split_nums = split_numbers(num_part, max_num)\n", " unique_nums.update(split_nums)\n", "\n", " for num in unique_nums:\n", " if 1 <= num <= max_num:\n", " name = list(model_dict.keys())[num - 1]\n", " selected_models.extend(model_dict[name])\n", "\n", " unique_models = {model['name']: model for model in selected_models}.values()\n", "\n", " for model in unique_models:\n", " model['dst_dir'] = dst_dir\n", "\n", " return list(unique_models)\n", "\n", "def handle_submodels(selection, num_selection, model_dict, dst_dir, url):\n", " submodels = add_submodels(selection, num_selection, model_dict, dst_dir)\n", " for submodel in submodels:\n", " if not inpainting_model and \"inpainting\" in submodel['name']:\n", " continue\n", " url += f\"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, \"\n", " return url\n", "\n", "url = \"\"\n", "url = handle_submodels(model, model_num, model_list, models_dir, url)\n", "url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)\n", "url = handle_submodels(controlnet, 
controlnet_num, controlnet_list, control_dir, url)\n", "\n", "''' file.txt - added urls '''\n", "\n", "def process_file_download(file_url, PREFIXES, unique_urls):\n", " files_urls = \"\"\n", "\n", " if file_url.startswith(\"http\"):\n", " if \"blob\" in file_url:\n", " file_url = file_url.replace(\"blob\", \"raw\")\n", " response = requests.get(file_url)\n", " lines = response.text.split('\\n')\n", " else:\n", " with open(file_url, 'r') as file:\n", " lines = file.readlines()\n", "\n", " current_tag = None\n", " for line in lines:\n", " line = line.strip()\n", " if any(f'# {tag}' in line.lower() for tag in PREFIXES):\n", " current_tag = next((tag for tag in PREFIXES if tag in line.lower()))\n", "\n", " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n", " for url in urls:\n", " filter_url = url.split('[')[0] # same url filter\n", "\n", " if url.startswith(\"http\") and filter_url not in unique_urls:\n", " files_urls += f\"{current_tag}:{url}, \"\n", " unique_urls.add(filter_url)\n", "\n", " return files_urls\n", "\n", "file_urls = \"\"\n", "unique_urls = set()\n", "\n", "if custom_file_urls:\n", " for custom_file_url in custom_file_urls.replace(',', '').split():\n", " if not custom_file_url.endswith('.txt'):\n", " custom_file_url += '.txt'\n", " if not custom_file_url.startswith('http'):\n", " if not custom_file_url.startswith(root_path):\n", " custom_file_url = f'{root_path}/{custom_file_url}'\n", "\n", " try:\n", " file_urls += process_file_download(custom_file_url, PREFIXES, unique_urls)\n", " except FileNotFoundError:\n", " pass\n", "\n", "# url prefixing\n", "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n", "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(PREFIXES.keys(), urls) if url for url in url.replace(',', '').split())\n", "url += \", \".join(prefixed_urls) + \", \" + file_urls\n", "\n", "if detailed_download == \"on\":\n", " print(\"\\n\\n\\033[33m# ====== Подробная Загрузка ====== 
#\\n\\033[0m\")\n", " download(url)\n", " print(\"\\n\\033[33m# =============================== #\\n\\033[0m\")\n", "else:\n", " with capture.capture_output():\n", " download(url)\n", "\n", "print(\"\\r🏁 Скачивание Завершено!\" + \" \"*15)\n", "\n", "\n", "# Cleaning shit after downloading...\n", "!find {webui_path} \\( -type d \\( -name \".ipynb_checkpoints\" -o -name \".aria2\" \\) -o -type f -name \"*.aria2\" \\) -exec rm -r {{}} \\; >/dev/null 2>&1\n", "\n", "\n", "## Install of Custom extensions\n", "if len(extension_repo) > 0:\n", " print(\"✨ Установка кастомных расширений...\", end='')\n", " with capture.capture_output():\n", " for repo, repo_name in extension_repo:\n", " if not repo_name:\n", " repo_name = repo.split('/')[-1]\n", " !cd {extensions_dir} \\\n", " && git clone {repo} {repo_name} \\\n", " && cd {repo_name} \\\n", " && git fetch\n", " print(f\"\\r📦 Установлено '{len(extension_repo)}', Кастомных расширений!\")\n", "\n", "\n", "## List Models and stuff V2\n", "if detailed_download == \"off\":\n", " print(\"\\n\\n\\033[33mЕсли вы не видете каких-то скаченных файлов, включите в виджетах функцию 'Подробная Загрузка'.\")\n", "\n", "%run {root_path}/file_cell/special/dl_display_results.py # display widgets result" ] } ], "metadata": { "colab": { "provenance": [] }, "kernelspec": { "display_name": "Python 3", "name": "python3" }, "language_info": { "name": "python" } }, "nbformat": 4, "nbformat_minor": 0 }