NagisaNao committed
Commit
12ae06e
1 Parent(s): f1959f1

update download code

files_cells/notebooks/en/auto-cleaner_en.ipynb CHANGED
@@ -252,13 +252,18 @@
252
  "\"\"\" functions \"\"\"\n",
253
  "def clean_directory(directory):\n",
254
  " deleted_files = 0\n",
 
 
255
  " for root, dirs, files in os.walk(directory):\n",
256
  " for file in files:\n",
 
 
257
  " if file.endswith(\".txt\"):\n",
258
  " continue\n",
259
- " os.remove(os.path.join(root, file))\n",
260
- " if file.endswith((\".safetensors\", \".pt\")):\n",
261
  " deleted_files += 1\n",
 
 
262
  " return deleted_files\n",
263
  "\n",
264
  "def update_memory_info():\n",
@@ -268,7 +273,7 @@
268
  " free = disk_space.free / (1024 ** 3)\n",
269
  "\n",
270
  " storage_info.value = f'''\n",
271
- " <div class=\"storage_info_AC\">Всего: {total:.2f} GB <span style=\"color: #555\">|</span> Используется: {used:.2f} GB <span style=\"color: #555\">|</span> Свободно: {free:.2f} GB</div>\n",
272
  " '''\n",
273
  "\n",
274
  "def on_execute_button_press(button):\n",
 
252
  "\"\"\" functions \"\"\"\n",
253
  "def clean_directory(directory):\n",
254
  " deleted_files = 0\n",
255
+ " image_dir = directories['Images']\n",
256
+ "\n",
257
  " for root, dirs, files in os.walk(directory):\n",
258
  " for file in files:\n",
259
+ " file_path = os.path.join(root, file)\n",
260
+ "\n",
261
  " if file.endswith(\".txt\"):\n",
262
  " continue\n",
263
+ " if file.endswith((\".safetensors\", \".pt\")) or root == image_dir: # fix for image counter\n",
 
264
  " deleted_files += 1\n",
265
+ "\n",
266
+ " os.remove(file_path)\n",
267
  " return deleted_files\n",
268
  "\n",
269
  "def update_memory_info():\n",
 
273
  " free = disk_space.free / (1024 ** 3)\n",
274
  "\n",
275
  " storage_info.value = f'''\n",
276
+ " <div class=\"storage_info_AC\">Total storage: {total:.2f} GB <span style=\"color: #555\">|</span> Used: {used:.2f} GB <span style=\"color: #555\">|</span> Free: {free:.2f} GB</div>\n",
277
  " '''\n",
278
  "\n",
279
  "def on_execute_button_press(button):\n",
files_cells/notebooks/en/downloading_en.ipynb CHANGED
@@ -58,33 +58,31 @@
58
  " print(\"💿 Installing the libraries, it's going to take a while:\\n\")\n",
59
  "\n",
60
  " install_lib = {\n",
61
- " \"gdown\": \"pip install -U gdown\",\n",
62
- " \"aria2\": \"apt-get update && apt -y install aria2\",\n",
63
- " \"localtunnel\": \"npm install -g localtunnel &> /dev/null\",\n",
64
  " \"insightface\": \"pip install insightface\",\n",
65
  " }\n",
66
  "\n",
67
- " # Dictionary of additional libraries specific to certain environments\n",
68
  " additional_libs = {\n",
69
  " \"Google Colab\": {\n",
70
  " \"xformers\": \"pip install xformers==0.0.26.post1 --no-deps\"\n",
71
  " },\n",
72
  " \"Kaggle\": {\n",
73
- " \"xformers\": \"pip install -q xformers==0.0.26.post1\",\n",
74
- " # \"torch\": \"pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\"\n",
 
75
  " }\n",
76
  " }\n",
77
  "\n",
78
- " # If the current environment has additional libraries, update the install_lib dictionary\n",
79
  " if env in additional_libs:\n",
80
  " install_lib.update(additional_libs[env])\n",
81
  "\n",
82
- " # Loop through libraries and execute install commands\n",
83
  " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n",
84
  " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n",
85
  " subprocess.run(install_cmd, shell=True, capture_output=True)\n",
86
  "\n",
87
- " # Additional manual installation steps for specific packages\n",
88
  " with capture.capture_output() as cap:\n",
89
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
90
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
@@ -397,7 +395,7 @@
397
  " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
398
  " aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'\n",
399
  "\n",
400
- " print(f\"\\n\\033[32m{'---'*45}\\n\\033[33mURL: \\033[34m{url}\\n\\033[33mSAVE DIR: \\033[34m{dst_dir}\\n\\033[33mFILE NAME: \\033[34m{file_name}033[0m\")\n",
401
  " # print(url, dst_dir, file_name)\n",
402
  "\n",
403
  " # === CivitAi API ===\n",
@@ -420,6 +418,12 @@
420
  "\n",
421
  " # -- GDrive --\n",
422
  " if 'drive.google' in url:\n",
 
 
 
 
 
 
423
  " if 'folders' in url:\n",
424
  " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
425
  " else:\n",
@@ -435,12 +439,8 @@
435
  " !aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} {url}\n",
436
  "\n",
437
  " # -- Other --\n",
438
- " elif 'http' in url or 'magnet' in url:\n",
439
- " if file_name:\n",
440
- " !aria2c {aria2_args} -d {dst_dir} -o {file_name} {url}\n",
441
- " else:\n",
442
- " parsed_link = '\"{}\"'.format(url)\n",
443
- " !aria2c {aria2_args} -d {dst_dir} -Z {parsed_link}\n",
444
  "\n",
445
  "def download(url):\n",
446
  " links_and_paths = url.split(',')\n",
 
58
  " print(\"💿 Installing the libraries, it's going to take a while:\\n\")\n",
59
  "\n",
60
  " install_lib = {\n",
61
+ " \"aria2\": \"apt -y install aria2\",\n",
62
+ " \"localtunnel\": \"npm install -g localtunnel\",\n",
 
63
  " \"insightface\": \"pip install insightface\",\n",
64
  " }\n",
65
  "\n",
 
66
  " additional_libs = {\n",
67
  " \"Google Colab\": {\n",
68
  " \"xformers\": \"pip install xformers==0.0.26.post1 --no-deps\"\n",
69
  " },\n",
70
  " \"Kaggle\": {\n",
71
+ " \"xformers\": \"pip install xformers==0.0.26.post1\",\n",
72
+ " # \"torch\": \"pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\",\n",
73
+ " \"aiohttp\": \"pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*\" # fix install req\n",
74
  " }\n",
75
  " }\n",
76
  "\n",
 
77
  " if env in additional_libs:\n",
78
  " install_lib.update(additional_libs[env])\n",
79
  "\n",
80
+ " # Loop through libraries\n",
81
  " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n",
82
  " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n",
83
  " subprocess.run(install_cmd, shell=True, capture_output=True)\n",
84
  "\n",
85
+ " # Additional specific packages\n",
86
  " with capture.capture_output() as cap:\n",
87
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
88
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
 
395
  " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
396
  " aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'\n",
397
  "\n",
398
+ " print(f\"\\n\\033[32m{'---'*45}\\n\\033[33mURL: \\033[34m{url}\\n\\033[33mSAVE DIR: \\033[34m{dst_dir}\\n\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
399
  " # print(url, dst_dir, file_name)\n",
400
  "\n",
401
  " # === CivitAi API ===\n",
 
418
  "\n",
419
  " # -- GDrive --\n",
420
  " if 'drive.google' in url:\n",
421
+ " try:\n",
422
+ " have_drive_link\n",
423
+ " except:\n",
424
+ " !pip install -U gdown > /dev/null\n",
425
+ " have_drive_link = True\n",
426
+ "\n",
427
  " if 'folders' in url:\n",
428
  " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
429
  " else:\n",
 
439
  " !aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} {url}\n",
440
  "\n",
441
  " # -- Other --\n",
442
+ " elif 'http' in url:\n",
443
+ " !aria2c {aria2_args} -d {dst_dir} {'-o' + file_name if file_name else ''} {url}\n",
 
 
 
 
444
  "\n",
445
  "def download(url):\n",
446
  " links_and_paths = url.split(',')\n",
files_cells/notebooks/en/launch_en.ipynb CHANGED
@@ -29,7 +29,9 @@
29
  "import time\n",
30
  "import json\n",
31
  "import requests\n",
 
32
  "from datetime import timedelta\n",
 
33
  "\n",
34
  "\n",
35
  "# ================= DETECT ENV =================\n",
@@ -62,7 +64,7 @@
62
  "\n",
63
  "\n",
64
  "# ======================== TUNNEL ========================\n",
65
- "import cloudpickle as pickle\n",
66
  "\n",
67
  "def get_public_ip(version='ipv4'):\n",
68
  " try:\n",
@@ -77,7 +79,7 @@
77
  "public_ipv4 = get_public_ip(version='ipv4')\n",
78
  "\n",
79
  "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
80
- "tunnel_port= 1769\n",
81
  "tunnel = tunnel_class(tunnel_port)\n",
82
  "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
83
  "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
@@ -86,31 +88,38 @@
86
  "if zrok_token:\n",
87
  " get_ipython().system('zrok enable {zrok_token} &> /dev/null')\n",
88
  " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
 
 
89
  "# ======================== TUNNEL ========================\n",
90
  "\n",
91
  "\n",
92
- "# automatic fixing path V2\n",
93
  "!sed -i 's|\"tagger_hf_cache_dir\": \".*\"|\"tagger_hf_cache_dir\": \"{webui_path}/models/interrogators/\"|' {webui_path}/config.json\n",
94
  "!sed -i 's|\"additional_networks_extra_lora_path\": \".*\"|\"additional_networks_extra_lora_path\": \"{webui_path}/models/Lora/\"|' {webui_path}/config.json\n",
95
  "!sed -i 's|\"ad_extra_models_dir\": \".*\"|\"ad_extra_models_dir\": \"{webui_path}/models/adetailer/\"|' {webui_path}/config.json\n",
96
- "# ---\n",
97
  "!sed -i 's/\"sd_checkpoint_hash\": \".*\"/\"sd_checkpoint_hash\": \"\"/g; s/\"sd_model_checkpoint\": \".*\"/\"sd_model_checkpoint\": \"\"/g; s/\"sd_vae\": \".*\"/\"sd_vae\": \"None\"/g' {webui_path}/config.json\n",
98
  "\n",
99
  "\n",
100
  "with tunnel:\n",
101
  " %cd {webui_path}\n",
102
- " commandline_arguments += f\" --port=1769\"\n",
103
  "\n",
 
104
  " if ngrok_token:\n",
105
- " commandline_arguments += ' --ngrok ' + ngrok_token\n",
106
  " if env != \"Google Colab\":\n",
107
- " commandline_arguments += f\" --encrypt-pass=1769 --api\"\n",
108
  "\n",
109
  " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
110
  "\n",
111
- " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
112
- " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
113
- " print(f\"\\n⌚️ \\033[0mYou have been conducting this session for - \\033[33m{time_since_start}\\033[0m\\n\\n\")"
 
 
 
 
 
 
114
  ]
115
  }
116
  ]
 
29
  "import time\n",
30
  "import json\n",
31
  "import requests\n",
32
+ "import cloudpickle as pickle\n",
33
  "from datetime import timedelta\n",
34
+ "from IPython.display import clear_output\n",
35
  "\n",
36
  "\n",
37
  "# ================= DETECT ENV =================\n",
 
64
  "\n",
65
  "\n",
66
  "# ======================== TUNNEL ========================\n",
67
+ "print('Please Wait...')\n",
68
  "\n",
69
  "def get_public_ip(version='ipv4'):\n",
70
  " try:\n",
 
79
  "public_ipv4 = get_public_ip(version='ipv4')\n",
80
  "\n",
81
  "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
82
+ "tunnel_port= 1734\n",
83
  "tunnel = tunnel_class(tunnel_port)\n",
84
  "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
85
  "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
 
88
  "if zrok_token:\n",
89
  " get_ipython().system('zrok enable {zrok_token} &> /dev/null')\n",
90
  " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
91
+ "\n",
92
+ "clear_output()\n",
93
  "# ======================== TUNNEL ========================\n",
94
  "\n",
95
  "\n",
96
+ " # automatic fixing path V2\n",
97
  "!sed -i 's|\"tagger_hf_cache_dir\": \".*\"|\"tagger_hf_cache_dir\": \"{webui_path}/models/interrogators/\"|' {webui_path}/config.json\n",
98
  "!sed -i 's|\"additional_networks_extra_lora_path\": \".*\"|\"additional_networks_extra_lora_path\": \"{webui_path}/models/Lora/\"|' {webui_path}/config.json\n",
99
  "!sed -i 's|\"ad_extra_models_dir\": \".*\"|\"ad_extra_models_dir\": \"{webui_path}/models/adetailer/\"|' {webui_path}/config.json\n",
 
100
  "!sed -i 's/\"sd_checkpoint_hash\": \".*\"/\"sd_checkpoint_hash\": \"\"/g; s/\"sd_model_checkpoint\": \".*\"/\"sd_model_checkpoint\": \"\"/g; s/\"sd_vae\": \".*\"/\"sd_vae\": \"None\"/g' {webui_path}/config.json\n",
101
  "\n",
102
  "\n",
103
  "with tunnel:\n",
104
  " %cd {webui_path}\n",
 
105
  "\n",
106
+ " commandline_arguments += f' --port={tunnel_port}'\n",
107
  " if ngrok_token:\n",
108
+ " commandline_arguments += f' --ngrok {ngrok_token}'\n",
109
  " if env != \"Google Colab\":\n",
110
+ " commandline_arguments += f' --encrypt-pass={tunnel_port} --api'\n",
111
  "\n",
112
  " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
113
  "\n",
114
+ "\n",
115
+ "# after runnig\n",
116
+ "start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
117
+ "time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
118
+ "print(f\"\\n⌚️ \\033[0mYou have been conducting this session for - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
119
+ "\n",
120
+ "''' del zrok tunnel '''\n",
121
+ "if zrok_token:\n",
122
+ " !zrok disable &> /dev/null"
123
  ]
124
  }
125
  ]
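The launch changes above boil down to deriving every port-dependent flag from tunnel_port, so the port only has to be edited in one place instead of the hard-coded 1769. A small standalone sketch with placeholder values:

# Placeholder values; in the notebook these come from settings.json and the tunnel setup.
tunnel_port = 1734
ngrok_token = ""
env = "Kaggle"
commandline_arguments = "--xformers --no-half-vae"

# --port and --encrypt-pass now both follow tunnel_port.
commandline_arguments += f" --port={tunnel_port}"
if ngrok_token:
    commandline_arguments += f" --ngrok {ngrok_token}"
if env != "Google Colab":
    commandline_arguments += f" --encrypt-pass={tunnel_port} --api"

print(commandline_arguments)
# -> --xformers --no-half-vae --port=1734 --encrypt-pass=1734 --api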
files_cells/notebooks/ru/auto-cleaner_ru.ipynb CHANGED
@@ -252,13 +252,18 @@
252
  "\"\"\" functions \"\"\"\n",
253
  "def clean_directory(directory):\n",
254
  " deleted_files = 0\n",
 
 
255
  " for root, dirs, files in os.walk(directory):\n",
256
  " for file in files:\n",
 
 
257
  " if file.endswith(\".txt\"):\n",
258
  " continue\n",
259
- " os.remove(os.path.join(root, file))\n",
260
- " if file.endswith((\".safetensors\", \".pt\")):\n",
261
  " deleted_files += 1\n",
 
 
262
  " return deleted_files\n",
263
  "\n",
264
  "def update_memory_info():\n",
 
252
  "\"\"\" functions \"\"\"\n",
253
  "def clean_directory(directory):\n",
254
  " deleted_files = 0\n",
255
+ " image_dir = directories['Изображения']\n",
256
+ "\n",
257
  " for root, dirs, files in os.walk(directory):\n",
258
  " for file in files:\n",
259
+ " file_path = os.path.join(root, file)\n",
260
+ "\n",
261
  " if file.endswith(\".txt\"):\n",
262
  " continue\n",
263
+ " if file.endswith((\".safetensors\", \".pt\")) or root == image_dir: # fix for image counter\n",
 
264
  " deleted_files += 1\n",
265
+ "\n",
266
+ " os.remove(file_path)\n",
267
  " return deleted_files\n",
268
  "\n",
269
  "def update_memory_info():\n",
files_cells/notebooks/ru/downloading_ru.ipynb CHANGED
@@ -58,33 +58,31 @@
58
  " print(\"💿 Установка библиотек, это займет какое-то время:\\n\")\n",
59
  "\n",
60
  " install_lib = {\n",
61
- " \"gdown\": \"pip install -U gdown\",\n",
62
- " \"aria2\": \"apt-get update && apt -y install aria2\",\n",
63
- " \"localtunnel\": \"npm install -g localtunnel &> /dev/null\",\n",
64
  " \"insightface\": \"pip install insightface\",\n",
65
  " }\n",
66
  "\n",
67
- " # Dictionary of additional libraries specific to certain environments\n",
68
  " additional_libs = {\n",
69
  " \"Google Colab\": {\n",
70
  " \"xformers\": \"pip install xformers==0.0.26.post1 --no-deps\"\n",
71
  " },\n",
72
  " \"Kaggle\": {\n",
73
- " \"xformers\": \"pip install -q xformers==0.0.26.post1\",\n",
74
- " # \"torch\": \"pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\"\n",
 
75
  " }\n",
76
  " }\n",
77
  "\n",
78
- " # If the current environment has additional libraries, update the install_lib dictionary\n",
79
  " if env in additional_libs:\n",
80
  " install_lib.update(additional_libs[env])\n",
81
  "\n",
82
- " # Loop through libraries and execute install commands\n",
83
  " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n",
84
  " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n",
85
  " subprocess.run(install_cmd, shell=True, capture_output=True)\n",
86
  "\n",
87
- " # Additional manual installation steps for specific packages\n",
88
  " with capture.capture_output() as cap:\n",
89
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
90
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
@@ -397,7 +395,7 @@
397
  " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
398
  " aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'\n",
399
  "\n",
400
- " print(f\"\\n\\033[32m{'---'*45}\\n\\033[33mURL: \\033[34m{url}\\n\\033[33mSAVE DIR: \\033[34m{dst_dir}\\n\\033[33mFILE NAME: \\033[34m{file_name}033[0m\")\n",
401
  " # print(url, dst_dir, file_name)\n",
402
  "\n",
403
  " # === CivitAi API ===\n",
@@ -420,6 +418,12 @@
420
  "\n",
421
  " # -- GDrive --\n",
422
  " if 'drive.google' in url:\n",
 
 
 
 
 
 
423
  " if 'folders' in url:\n",
424
  " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
425
  " else:\n",
@@ -435,12 +439,8 @@
435
  " !aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} {url}\n",
436
  "\n",
437
  " # -- Other --\n",
438
- " elif 'http' in url or 'magnet' in url:\n",
439
- " if file_name:\n",
440
- " !aria2c {aria2_args} -d {dst_dir} -o {file_name} {url}\n",
441
- " else:\n",
442
- " parsed_link = '\"{}\"'.format(url)\n",
443
- " !aria2c {aria2_args} -d {dst_dir} -Z {parsed_link}\n",
444
  "\n",
445
  "def download(url):\n",
446
  " links_and_paths = url.split(',')\n",
 
58
  " print(\"💿 Установка библиотек, это займет какое-то время:\\n\")\n",
59
  "\n",
60
  " install_lib = {\n",
61
+ " \"aria2\": \"apt -y install aria2\",\n",
62
+ " \"localtunnel\": \"npm install -g localtunnel\",\n",
 
63
  " \"insightface\": \"pip install insightface\",\n",
64
  " }\n",
65
  "\n",
 
66
  " additional_libs = {\n",
67
  " \"Google Colab\": {\n",
68
  " \"xformers\": \"pip install xformers==0.0.26.post1 --no-deps\"\n",
69
  " },\n",
70
  " \"Kaggle\": {\n",
71
+ " \"xformers\": \"pip install xformers==0.0.26.post1\",\n",
72
+ " # \"torch\": \"pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\",\n",
73
+ " \"aiohttp\": \"pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*\" # fix install req\n",
74
  " }\n",
75
  " }\n",
76
  "\n",
 
77
  " if env in additional_libs:\n",
78
  " install_lib.update(additional_libs[env])\n",
79
  "\n",
80
+ " # Loop through libraries\n",
81
  " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n",
82
  " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n",
83
  " subprocess.run(install_cmd, shell=True, capture_output=True)\n",
84
  "\n",
85
+ " # Additional specific packages\n",
86
  " with capture.capture_output() as cap:\n",
87
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
88
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
 
395
  " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
396
  " aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'\n",
397
  "\n",
398
+ " print(f\"\\n\\033[32m{'---'*45}\\n\\033[33mURL: \\033[34m{url}\\n\\033[33mSAVE DIR: \\033[34m{dst_dir}\\n\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
399
  " # print(url, dst_dir, file_name)\n",
400
  "\n",
401
  " # === CivitAi API ===\n",
 
418
  "\n",
419
  " # -- GDrive --\n",
420
  " if 'drive.google' in url:\n",
421
+ " try:\n",
422
+ " have_drive_link\n",
423
+ " except:\n",
424
+ " !pip install -U gdown > /dev/null\n",
425
+ " have_drive_link = True\n",
426
+ "\n",
427
  " if 'folders' in url:\n",
428
  " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
429
  " else:\n",
 
439
  " !aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} {url}\n",
440
  "\n",
441
  " # -- Other --\n",
442
+ " elif 'http' in url:\n",
443
+ " !aria2c {aria2_args} -d {dst_dir} {'-o' + file_name if file_name else ''} {url}\n",
 
 
 
 
444
  "\n",
445
  "def download(url):\n",
446
  " links_and_paths = url.split(',')\n",
files_cells/notebooks/ru/launch_ru.ipynb CHANGED
@@ -29,7 +29,9 @@
29
  "import time\n",
30
  "import json\n",
31
  "import requests\n",
 
32
  "from datetime import timedelta\n",
 
33
  "\n",
34
  "\n",
35
  "# ================= DETECT ENV =================\n",
@@ -62,7 +64,7 @@
62
  "\n",
63
  "\n",
64
  "# ======================== TUNNEL ========================\n",
65
- "import cloudpickle as pickle\n",
66
  "\n",
67
  "def get_public_ip(version='ipv4'):\n",
68
  " try:\n",
@@ -77,7 +79,7 @@
77
  "public_ipv4 = get_public_ip(version='ipv4')\n",
78
  "\n",
79
  "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
80
- "tunnel_port= 1769\n",
81
  "tunnel = tunnel_class(tunnel_port)\n",
82
  "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
83
  "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
@@ -86,6 +88,8 @@
86
  "if zrok_token:\n",
87
  " get_ipython().system('zrok enable {zrok_token} &> /dev/null')\n",
88
  " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
 
 
89
  "# ======================== TUNNEL ========================\n",
90
  "\n",
91
  "\n",
@@ -93,24 +97,29 @@
93
  "!sed -i 's|\"tagger_hf_cache_dir\": \".*\"|\"tagger_hf_cache_dir\": \"{webui_path}/models/interrogators/\"|' {webui_path}/config.json\n",
94
  "!sed -i 's|\"additional_networks_extra_lora_path\": \".*\"|\"additional_networks_extra_lora_path\": \"{webui_path}/models/Lora/\"|' {webui_path}/config.json\n",
95
  "!sed -i 's|\"ad_extra_models_dir\": \".*\"|\"ad_extra_models_dir\": \"{webui_path}/models/adetailer/\"|' {webui_path}/config.json\n",
96
- "# ---\n",
97
  "!sed -i 's/\"sd_checkpoint_hash\": \".*\"/\"sd_checkpoint_hash\": \"\"/g; s/\"sd_model_checkpoint\": \".*\"/\"sd_model_checkpoint\": \"\"/g; s/\"sd_vae\": \".*\"/\"sd_vae\": \"None\"/g' {webui_path}/config.json\n",
98
  "\n",
99
  "\n",
100
  "with tunnel:\n",
101
  " %cd {webui_path}\n",
102
- " commandline_arguments += f\" --port=1769\"\n",
103
  "\n",
 
104
  " if ngrok_token:\n",
105
- " commandline_arguments += ' --ngrok ' + ngrok_token\n",
106
  " if env != \"Google Colab\":\n",
107
- " commandline_arguments += f\" --encrypt-pass=1769 --api\"\n",
108
  "\n",
109
  " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
110
  "\n",
111
- " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
112
- " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
113
- " print(f\"\\n⌚️ \\033[0mВы проводите эту сессию в течение - \\033[33m{time_since_start}\\033[0m\\n\\n\")"
 
 
 
 
 
 
114
  ]
115
  }
116
  ]
 
29
  "import time\n",
30
  "import json\n",
31
  "import requests\n",
32
+ "import cloudpickle as pickle\n",
33
  "from datetime import timedelta\n",
34
+ "from IPython.display import clear_output\n",
35
  "\n",
36
  "\n",
37
  "# ================= DETECT ENV =================\n",
 
64
  "\n",
65
  "\n",
66
  "# ======================== TUNNEL ========================\n",
67
+ "print('Please Wait...')\n",
68
  "\n",
69
  "def get_public_ip(version='ipv4'):\n",
70
  " try:\n",
 
79
  "public_ipv4 = get_public_ip(version='ipv4')\n",
80
  "\n",
81
  "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
82
+ "tunnel_port= 1734\n",
83
  "tunnel = tunnel_class(tunnel_port)\n",
84
  "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
85
  "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
 
88
  "if zrok_token:\n",
89
  " get_ipython().system('zrok enable {zrok_token} &> /dev/null')\n",
90
  " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
91
+ "\n",
92
+ "clear_output()\n",
93
  "# ======================== TUNNEL ========================\n",
94
  "\n",
95
  "\n",
 
97
  "!sed -i 's|\"tagger_hf_cache_dir\": \".*\"|\"tagger_hf_cache_dir\": \"{webui_path}/models/interrogators/\"|' {webui_path}/config.json\n",
98
  "!sed -i 's|\"additional_networks_extra_lora_path\": \".*\"|\"additional_networks_extra_lora_path\": \"{webui_path}/models/Lora/\"|' {webui_path}/config.json\n",
99
  "!sed -i 's|\"ad_extra_models_dir\": \".*\"|\"ad_extra_models_dir\": \"{webui_path}/models/adetailer/\"|' {webui_path}/config.json\n",
 
100
  "!sed -i 's/\"sd_checkpoint_hash\": \".*\"/\"sd_checkpoint_hash\": \"\"/g; s/\"sd_model_checkpoint\": \".*\"/\"sd_model_checkpoint\": \"\"/g; s/\"sd_vae\": \".*\"/\"sd_vae\": \"None\"/g' {webui_path}/config.json\n",
101
  "\n",
102
  "\n",
103
  "with tunnel:\n",
104
  " %cd {webui_path}\n",
 
105
  "\n",
106
+ " commandline_arguments += f' --port={tunnel_port}'\n",
107
  " if ngrok_token:\n",
108
+ " commandline_arguments += f' --ngrok {ngrok_token}'\n",
109
  " if env != \"Google Colab\":\n",
110
+ " commandline_arguments += f' --encrypt-pass={tunnel_port} --api'\n",
111
  "\n",
112
  " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
113
  "\n",
114
+ "\n",
115
+ "# after runnig\n",
116
+ "start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
117
+ "time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
118
+ "print(f\"\\n⌚️ \\033[0mВы проводите эту сессию в течение - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
119
+ "\n",
120
+ "''' del zrok tunnel '''\n",
121
+ "if zrok_token:\n",
122
+ " !zrok disable &> /dev/null"
123
  ]
124
  }
125
  ]
files_cells/python/en/auto-cleaner_en.py CHANGED
@@ -233,13 +233,18 @@ directories = {
233
  """ functions """
234
  def clean_directory(directory):
235
  deleted_files = 0
 
 
236
  for root, dirs, files in os.walk(directory):
237
  for file in files:
 
 
238
  if file.endswith(".txt"):
239
  continue
240
- os.remove(os.path.join(root, file))
241
- if file.endswith((".safetensors", ".pt")):
242
  deleted_files += 1
 
 
243
  return deleted_files
244
 
245
  def update_memory_info():
@@ -249,7 +254,7 @@ def update_memory_info():
249
  free = disk_space.free / (1024 ** 3)
250
 
251
  storage_info.value = f'''
252
- <div class="storage_info_AC">Всего: {total:.2f} GB <span style="color: #555">|</span> Используется: {used:.2f} GB <span style="color: #555">|</span> Свободно: {free:.2f} GB</div>
253
  '''
254
 
255
  def on_execute_button_press(button):
 
233
  """ functions """
234
  def clean_directory(directory):
235
  deleted_files = 0
236
+ image_dir = directories['Images']
237
+
238
  for root, dirs, files in os.walk(directory):
239
  for file in files:
240
+ file_path = os.path.join(root, file)
241
+
242
  if file.endswith(".txt"):
243
  continue
244
+ if file.endswith((".safetensors", ".pt")) or root == image_dir: # fix for image counter
 
245
  deleted_files += 1
246
+
247
+ os.remove(file_path)
248
  return deleted_files
249
 
250
  def update_memory_info():
 
254
  free = disk_space.free / (1024 ** 3)
255
 
256
  storage_info.value = f'''
257
+ <div class="storage_info_AC">Total storage: {total:.2f} GB <span style="color: #555">|</span> Used: {used:.2f} GB <span style="color: #555">|</span> Free: {free:.2f} GB</div>
258
  '''
259
 
260
  def on_execute_button_press(button):
files_cells/python/en/downloading_en.py CHANGED
@@ -39,33 +39,31 @@ if not os.path.exists(flag_file):
39
  print("💿 Installing the libraries, it's going to take a while:\n")
40
 
41
  install_lib = {
42
- "gdown": "pip install -U gdown",
43
- "aria2": "apt-get update && apt -y install aria2",
44
- "localtunnel": "npm install -g localtunnel &> /dev/null",
45
  "insightface": "pip install insightface",
46
  }
47
 
48
- # Dictionary of additional libraries specific to certain environments
49
  additional_libs = {
50
  "Google Colab": {
51
  "xformers": "pip install xformers==0.0.26.post1 --no-deps"
52
  },
53
  "Kaggle": {
54
- "xformers": "pip install -q xformers==0.0.26.post1",
55
- # "torch": "pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121"
 
56
  }
57
  }
58
 
59
- # If the current environment has additional libraries, update the install_lib dictionary
60
  if env in additional_libs:
61
  install_lib.update(additional_libs[env])
62
 
63
- # Loop through libraries and execute install commands
64
  for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):
65
  print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='')
66
  subprocess.run(install_cmd, shell=True, capture_output=True)
67
 
68
- # Additional manual installation steps for specific packages
69
  with capture.capture_output() as cap:
70
  get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
71
  get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
@@ -378,7 +376,7 @@ def manual_download(url, dst_dir, file_name):
378
  basename = url.split("/")[-1] if file_name is None else file_name
379
  aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'
380
 
381
- print(f"\n\033[32m{'---'*45}\n\033[33mURL: \033[34m{url}\n\033[33mSAVE DIR: \033[34m{dst_dir}\n\033[33mFILE NAME: \033[34m{file_name}033[0m")
382
  # print(url, dst_dir, file_name)
383
 
384
  # === CivitAi API ===
@@ -401,6 +399,12 @@ def manual_download(url, dst_dir, file_name):
401
 
402
  # -- GDrive --
403
  if 'drive.google' in url:
 
 
 
 
 
 
404
  if 'folders' in url:
405
  get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
406
  else:
@@ -416,12 +420,8 @@ def manual_download(url, dst_dir, file_name):
416
  get_ipython().system('aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} {url}')
417
 
418
  # -- Other --
419
- elif 'http' in url or 'magnet' in url:
420
- if file_name:
421
- get_ipython().system('aria2c {aria2_args} -d {dst_dir} -o {file_name} {url}')
422
- else:
423
- parsed_link = '"{}"'.format(url)
424
- get_ipython().system('aria2c {aria2_args} -d {dst_dir} -Z {parsed_link}')
425
 
426
  def download(url):
427
  links_and_paths = url.split(',')
 
39
  print("💿 Installing the libraries, it's going to take a while:\n")
40
 
41
  install_lib = {
42
+ "aria2": "apt -y install aria2",
43
+ "localtunnel": "npm install -g localtunnel",
 
44
  "insightface": "pip install insightface",
45
  }
46
 
 
47
  additional_libs = {
48
  "Google Colab": {
49
  "xformers": "pip install xformers==0.0.26.post1 --no-deps"
50
  },
51
  "Kaggle": {
52
+ "xformers": "pip install xformers==0.0.26.post1",
53
+ # "torch": "pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121",
54
+ "aiohttp": "pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*" # fix install req
55
  }
56
  }
57
 
 
58
  if env in additional_libs:
59
  install_lib.update(additional_libs[env])
60
 
61
+ # Loop through libraries
62
  for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):
63
  print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='')
64
  subprocess.run(install_cmd, shell=True, capture_output=True)
65
 
66
+ # Additional specific packages
67
  with capture.capture_output() as cap:
68
  get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
69
  get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
 
376
  basename = url.split("/")[-1] if file_name is None else file_name
377
  aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'
378
 
379
+ print(f"\n\033[32m{'---'*45}\n\033[33mURL: \033[34m{url}\n\033[33mSAVE DIR: \033[34m{dst_dir}\n\033[33mFILE NAME: \033[34m{file_name}\033[0m")
380
  # print(url, dst_dir, file_name)
381
 
382
  # === CivitAi API ===
 
399
 
400
  # -- GDrive --
401
  if 'drive.google' in url:
402
+ try:
403
+ have_drive_link
404
+ except:
405
+ get_ipython().system('pip install -U gdown > /dev/null')
406
+ have_drive_link = True
407
+
408
  if 'folders' in url:
409
  get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
410
  else:
 
420
  get_ipython().system('aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} {url}')
421
 
422
  # -- Other --
423
+ elif 'http' in url:
424
+ get_ipython().system("aria2c {aria2_args} -d {dst_dir} {'-o' + file_name if file_name else ''} {url}")
 
 
 
 
425
 
426
  def download(url):
427
  links_and_paths = url.split(',')
files_cells/python/en/launch_en.py CHANGED
@@ -5,7 +5,9 @@ import re
5
  import time
6
  import json
7
  import requests
 
8
  from datetime import timedelta
 
9
 
10
 
11
  # ================= DETECT ENV =================
@@ -38,7 +40,7 @@ commandline_arguments = settings['commandline_arguments']
38
 
39
 
40
  # ======================== TUNNEL ========================
41
- import cloudpickle as pickle
42
 
43
  def get_public_ip(version='ipv4'):
44
  try:
@@ -53,7 +55,7 @@ def get_public_ip(version='ipv4'):
53
  public_ipv4 = get_public_ip(version='ipv4')
54
 
55
  tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
56
- tunnel_port= 1769
57
  tunnel = tunnel_class(tunnel_port)
58
  tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
59
  tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
@@ -62,29 +64,36 @@ tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\
62
  if zrok_token:
63
  get_ipython().system('zrok enable {zrok_token} &> /dev/null')
64
  tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
 
 
65
  # ======================== TUNNEL ========================
66
 
67
 
68
- # automatic fixing path V2
69
  get_ipython().system('sed -i \'s|"tagger_hf_cache_dir": ".*"|"tagger_hf_cache_dir": "{webui_path}/models/interrogators/"|\' {webui_path}/config.json')
70
  get_ipython().system('sed -i \'s|"additional_networks_extra_lora_path": ".*"|"additional_networks_extra_lora_path": "{webui_path}/models/Lora/"|\' {webui_path}/config.json')
71
  get_ipython().system('sed -i \'s|"ad_extra_models_dir": ".*"|"ad_extra_models_dir": "{webui_path}/models/adetailer/"|\' {webui_path}/config.json')
72
- # ---
73
  get_ipython().system('sed -i \'s/"sd_checkpoint_hash": ".*"/"sd_checkpoint_hash": ""/g; s/"sd_model_checkpoint": ".*"/"sd_model_checkpoint": ""/g; s/"sd_vae": ".*"/"sd_vae": "None"/g\' {webui_path}/config.json')
74
 
75
 
76
  with tunnel:
77
  get_ipython().run_line_magic('cd', '{webui_path}')
78
- commandline_arguments += f" --port=1769"
79
 
 
80
  if ngrok_token:
81
- commandline_arguments += ' --ngrok ' + ngrok_token
82
  if env != "Google Colab":
83
- commandline_arguments += f" --encrypt-pass=1769 --api"
84
 
85
  get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
86
 
87
- start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
88
- time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
89
- print(f"\n⌚️ \033[0mYou have been conducting this session for - \033[33m{time_since_start}\033[0m\n\n")
 
 
 
 
 
 
90
 
 
5
  import time
6
  import json
7
  import requests
8
+ import cloudpickle as pickle
9
  from datetime import timedelta
10
+ from IPython.display import clear_output
11
 
12
 
13
  # ================= DETECT ENV =================
 
40
 
41
 
42
  # ======================== TUNNEL ========================
43
+ print('Please Wait...')
44
 
45
  def get_public_ip(version='ipv4'):
46
  try:
 
55
  public_ipv4 = get_public_ip(version='ipv4')
56
 
57
  tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
58
+ tunnel_port = 1734
59
  tunnel = tunnel_class(tunnel_port)
60
  tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
61
  tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
 
64
  if zrok_token:
65
  get_ipython().system('zrok enable {zrok_token} &> /dev/null')
66
  tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
67
+
68
+ clear_output()
69
  # ======================== TUNNEL ========================
70
 
71
 
72
+ # automatic fixing path V2
73
  get_ipython().system('sed -i \'s|"tagger_hf_cache_dir": ".*"|"tagger_hf_cache_dir": "{webui_path}/models/interrogators/"|\' {webui_path}/config.json')
74
  get_ipython().system('sed -i \'s|"additional_networks_extra_lora_path": ".*"|"additional_networks_extra_lora_path": "{webui_path}/models/Lora/"|\' {webui_path}/config.json')
75
  get_ipython().system('sed -i \'s|"ad_extra_models_dir": ".*"|"ad_extra_models_dir": "{webui_path}/models/adetailer/"|\' {webui_path}/config.json')
 
76
  get_ipython().system('sed -i \'s/"sd_checkpoint_hash": ".*"/"sd_checkpoint_hash": ""/g; s/"sd_model_checkpoint": ".*"/"sd_model_checkpoint": ""/g; s/"sd_vae": ".*"/"sd_vae": "None"/g\' {webui_path}/config.json')
77
 
78
 
79
  with tunnel:
80
  get_ipython().run_line_magic('cd', '{webui_path}')
 
81
 
82
+ commandline_arguments += f' --port={tunnel_port}'
83
  if ngrok_token:
84
+ commandline_arguments += f' --ngrok {ngrok_token}'
85
  if env != "Google Colab":
86
+ commandline_arguments += f' --encrypt-pass={tunnel_port} --api'
87
 
88
  get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
89
 
90
+
91
+ # after running
92
+ start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
93
+ time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
94
+ print(f"\n⌚️ \033[0mYou have been conducting this session for - \033[33m{time_since_start}\033[0m\n\n")
95
+
96
+ ''' del zrok tunnel '''
97
+ if zrok_token:
98
+ get_ipython().system('zrok disable &> /dev/null')
99
 
files_cells/python/ru/auto-cleaner_ru.py CHANGED
@@ -233,13 +233,18 @@ directories = {
233
  """ functions """
234
  def clean_directory(directory):
235
  deleted_files = 0
 
 
236
  for root, dirs, files in os.walk(directory):
237
  for file in files:
 
 
238
  if file.endswith(".txt"):
239
  continue
240
- os.remove(os.path.join(root, file))
241
- if file.endswith((".safetensors", ".pt")):
242
  deleted_files += 1
 
 
243
  return deleted_files
244
 
245
  def update_memory_info():
 
233
  """ functions """
234
  def clean_directory(directory):
235
  deleted_files = 0
236
+ image_dir = directories['Изображения']
237
+
238
  for root, dirs, files in os.walk(directory):
239
  for file in files:
240
+ file_path = os.path.join(root, file)
241
+
242
  if file.endswith(".txt"):
243
  continue
244
+ if file.endswith((".safetensors", ".pt")) or root == image_dir: # fix for image counter
 
245
  deleted_files += 1
246
+
247
+ os.remove(file_path)
248
  return deleted_files
249
 
250
  def update_memory_info():
files_cells/python/ru/downloading_ru.py CHANGED
@@ -39,33 +39,31 @@ if not os.path.exists(flag_file):
39
  print("💿 Установка библиотек, это займет какое-то время:\n")
40
 
41
  install_lib = {
42
- "gdown": "pip install -U gdown",
43
- "aria2": "apt-get update && apt -y install aria2",
44
- "localtunnel": "npm install -g localtunnel &> /dev/null",
45
  "insightface": "pip install insightface",
46
  }
47
 
48
- # Dictionary of additional libraries specific to certain environments
49
  additional_libs = {
50
  "Google Colab": {
51
  "xformers": "pip install xformers==0.0.26.post1 --no-deps"
52
  },
53
  "Kaggle": {
54
- "xformers": "pip install -q xformers==0.0.26.post1",
55
- # "torch": "pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121"
 
56
  }
57
  }
58
 
59
- # If the current environment has additional libraries, update the install_lib dictionary
60
  if env in additional_libs:
61
  install_lib.update(additional_libs[env])
62
 
63
- # Loop through libraries and execute install commands
64
  for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):
65
  print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='')
66
  subprocess.run(install_cmd, shell=True, capture_output=True)
67
 
68
- # Additional manual installation steps for specific packages
69
  with capture.capture_output() as cap:
70
  get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
71
  get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
@@ -378,7 +376,7 @@ def manual_download(url, dst_dir, file_name):
378
  basename = url.split("/")[-1] if file_name is None else file_name
379
  aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'
380
 
381
- print(f"\n\033[32m{'---'*45}\n\033[33mURL: \033[34m{url}\n\033[33mSAVE DIR: \033[34m{dst_dir}\n\033[33mFILE NAME: \033[34m{file_name}033[0m")
382
  # print(url, dst_dir, file_name)
383
 
384
  # === CivitAi API ===
@@ -401,6 +399,12 @@ def manual_download(url, dst_dir, file_name):
401
 
402
  # -- GDrive --
403
  if 'drive.google' in url:
 
 
 
 
 
 
404
  if 'folders' in url:
405
  get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
406
  else:
@@ -416,12 +420,8 @@ def manual_download(url, dst_dir, file_name):
416
  get_ipython().system('aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} {url}')
417
 
418
  # -- Other --
419
- elif 'http' in url or 'magnet' in url:
420
- if file_name:
421
- get_ipython().system('aria2c {aria2_args} -d {dst_dir} -o {file_name} {url}')
422
- else:
423
- parsed_link = '"{}"'.format(url)
424
- get_ipython().system('aria2c {aria2_args} -d {dst_dir} -Z {parsed_link}')
425
 
426
  def download(url):
427
  links_and_paths = url.split(',')
 
39
  print("💿 Установка библиотек, это займет какое-то время:\n")
40
 
41
  install_lib = {
42
+ "aria2": "apt -y install aria2",
43
+ "localtunnel": "npm install -g localtunnel",
 
44
  "insightface": "pip install insightface",
45
  }
46
 
 
47
  additional_libs = {
48
  "Google Colab": {
49
  "xformers": "pip install xformers==0.0.26.post1 --no-deps"
50
  },
51
  "Kaggle": {
52
+ "xformers": "pip install xformers==0.0.26.post1",
53
+ # "torch": "pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121",
54
+ "aiohttp": "pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*" # fix install req
55
  }
56
  }
57
 
 
58
  if env in additional_libs:
59
  install_lib.update(additional_libs[env])
60
 
61
+ # Loop through libraries
62
  for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):
63
  print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='')
64
  subprocess.run(install_cmd, shell=True, capture_output=True)
65
 
66
+ # Additional specific packages
67
  with capture.capture_output() as cap:
68
  get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
69
  get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
 
376
  basename = url.split("/")[-1] if file_name is None else file_name
377
  aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'
378
 
379
+ print(f"\n\033[32m{'---'*45}\n\033[33mURL: \033[34m{url}\n\033[33mSAVE DIR: \033[34m{dst_dir}\n\033[33mFILE NAME: \033[34m{file_name}\033[0m")
380
  # print(url, dst_dir, file_name)
381
 
382
  # === CivitAi API ===
 
399
 
400
  # -- GDrive --
401
  if 'drive.google' in url:
402
+ try:
403
+ have_drive_link
404
+ except:
405
+ get_ipython().system('pip install -U gdown > /dev/null')
406
+ have_drive_link = True
407
+
408
  if 'folders' in url:
409
  get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
410
  else:
 
420
  get_ipython().system('aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} {url}')
421
 
422
  # -- Other --
423
+ elif 'http' in url:
424
+ get_ipython().system("aria2c {aria2_args} -d {dst_dir} {'-o' + file_name if file_name else ''} {url}")
 
 
 
 
425
 
426
  def download(url):
427
  links_and_paths = url.split(',')
files_cells/python/ru/launch_ru.py CHANGED
@@ -5,7 +5,9 @@ import re
5
  import time
6
  import json
7
  import requests
 
8
  from datetime import timedelta
 
9
 
10
 
11
  # ================= DETECT ENV =================
@@ -38,7 +40,7 @@ commandline_arguments = settings['commandline_arguments']
38
 
39
 
40
  # ======================== TUNNEL ========================
41
- import cloudpickle as pickle
42
 
43
  def get_public_ip(version='ipv4'):
44
  try:
@@ -53,7 +55,7 @@ def get_public_ip(version='ipv4'):
53
  public_ipv4 = get_public_ip(version='ipv4')
54
 
55
  tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
56
- tunnel_port= 1769
57
  tunnel = tunnel_class(tunnel_port)
58
  tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
59
  tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
@@ -62,6 +64,8 @@ tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\
62
  if zrok_token:
63
  get_ipython().system('zrok enable {zrok_token} &> /dev/null')
64
  tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
 
 
65
  # ======================== TUNNEL ========================
66
 
67
 
@@ -69,22 +73,27 @@ if zrok_token:
69
  get_ipython().system('sed -i \'s|"tagger_hf_cache_dir": ".*"|"tagger_hf_cache_dir": "{webui_path}/models/interrogators/"|\' {webui_path}/config.json')
70
  get_ipython().system('sed -i \'s|"additional_networks_extra_lora_path": ".*"|"additional_networks_extra_lora_path": "{webui_path}/models/Lora/"|\' {webui_path}/config.json')
71
  get_ipython().system('sed -i \'s|"ad_extra_models_dir": ".*"|"ad_extra_models_dir": "{webui_path}/models/adetailer/"|\' {webui_path}/config.json')
72
- # ---
73
  get_ipython().system('sed -i \'s/"sd_checkpoint_hash": ".*"/"sd_checkpoint_hash": ""/g; s/"sd_model_checkpoint": ".*"/"sd_model_checkpoint": ""/g; s/"sd_vae": ".*"/"sd_vae": "None"/g\' {webui_path}/config.json')
74
 
75
 
76
  with tunnel:
77
  get_ipython().run_line_magic('cd', '{webui_path}')
78
- commandline_arguments += f" --port=1769"
79
 
 
80
  if ngrok_token:
81
- commandline_arguments += ' --ngrok ' + ngrok_token
82
  if env != "Google Colab":
83
- commandline_arguments += f" --encrypt-pass=1769 --api"
84
 
85
  get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
86
 
87
- start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
88
- time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
89
- print(f"\n⌚️ \033[0mВы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m\n\n")
 
 
 
 
 
 
90
 
 
5
  import time
6
  import json
7
  import requests
8
+ import cloudpickle as pickle
9
  from datetime import timedelta
10
+ from IPython.display import clear_output
11
 
12
 
13
  # ================= DETECT ENV =================
 
40
 
41
 
42
  # ======================== TUNNEL ========================
43
+ print('Please Wait...')
44
 
45
  def get_public_ip(version='ipv4'):
46
  try:
 
55
  public_ipv4 = get_public_ip(version='ipv4')
56
 
57
  tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
58
+ tunnel_port = 1734
59
  tunnel = tunnel_class(tunnel_port)
60
  tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
61
  tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
 
64
  if zrok_token:
65
  get_ipython().system('zrok enable {zrok_token} &> /dev/null')
66
  tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
67
+
68
+ clear_output()
69
  # ======================== TUNNEL ========================
70
 
71
 
 
73
  get_ipython().system('sed -i \'s|"tagger_hf_cache_dir": ".*"|"tagger_hf_cache_dir": "{webui_path}/models/interrogators/"|\' {webui_path}/config.json')
74
  get_ipython().system('sed -i \'s|"additional_networks_extra_lora_path": ".*"|"additional_networks_extra_lora_path": "{webui_path}/models/Lora/"|\' {webui_path}/config.json')
75
  get_ipython().system('sed -i \'s|"ad_extra_models_dir": ".*"|"ad_extra_models_dir": "{webui_path}/models/adetailer/"|\' {webui_path}/config.json')
 
76
  get_ipython().system('sed -i \'s/"sd_checkpoint_hash": ".*"/"sd_checkpoint_hash": ""/g; s/"sd_model_checkpoint": ".*"/"sd_model_checkpoint": ""/g; s/"sd_vae": ".*"/"sd_vae": "None"/g\' {webui_path}/config.json')
77
 
78
 
79
  with tunnel:
80
  get_ipython().run_line_magic('cd', '{webui_path}')
 
81
 
82
+ commandline_arguments += f' --port={tunnel_port}'
83
  if ngrok_token:
84
+ commandline_arguments += f' --ngrok {ngrok_token}'
85
  if env != "Google Colab":
86
+ commandline_arguments += f' --encrypt-pass={tunnel_port} --api'
87
 
88
  get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
89
 
90
+
91
+ # after running
92
+ start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
93
+ time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
94
+ print(f"\n⌚️ \033[0mВы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m\n\n")
95
+
96
+ ''' del zrok tunnel '''
97
+ if zrok_token:
98
+ get_ipython().system('zrok disable &> /dev/null')
99