Upload 3 files

Files changed:
- app.py (+12, -8)
- env.py (+10, -10)
- modutils.py (+263, -50)
app.py
CHANGED
@@ -23,7 +23,7 @@ from mod import (clear_cache, get_repo_safetensors, is_repo_name, is_repo_exists
 from modutils import (search_civitai_lora, select_civitai_lora, search_civitai_lora_json,
     download_my_lora_flux, get_all_lora_tupled_list, apply_lora_prompt_flux,
     update_loras_flux, update_civitai_selection, get_civitai_tag, CIVITAI_SORT, CIVITAI_PERIOD,
-    get_t2i_model_info, download_hf_file)
+    get_t2i_model_info, download_hf_file, save_image_history)
 from tagger.tagger import predict_tags_wd, compose_prompt_to_copy
 from tagger.fl2flux import predict_tags_fl2_flux
 
@@ -741,9 +741,13 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', fill_width=True, css=css, delete_ca
 gr.Markdown("[Check the list of FLUX LoRAs](https://huggingface.co/models?other=base_model:adapter:black-forest-labs/FLUX.1-dev)", elem_id="lora_list")
 with gr.Column():
     progress_bar = gr.Markdown(elem_id="progress",visible=False)
-    result = gr.Image(label="Generated Image", format="png", show_share_button=False, interactive=False)
+    result = gr.Image(label="Generated Image", format="png", type="filepath", show_share_button=False, interactive=False)
     with gr.Accordion("History", open=False):
-        history_gallery = gr.Gallery(label="History", columns=6, object_fit="contain", interactive=False
+        history_gallery = gr.Gallery(label="History", columns=6, object_fit="contain", interactive=False, format="png",
+            show_share_button=False, show_download_button=True)
+        history_files = gr.Files(interactive=False, visible=False)
+        history_clear_button = gr.Button(value="Clear History", variant="secondary")
+        history_clear_button.click(lambda: ([], []), None, [history_gallery, history_files], queue=False, show_api=False)
     with gr.Group():
         with gr.Row():
             model_name = gr.Dropdown(label="Base Model", info="You can enter a huggingface model repo_id or path of single safetensors file to want to use.",
@@ -880,11 +884,11 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', fill_width=True, css=css, delete_ca
     outputs=[result, seed, progress_bar],
     queue=True,
     show_api=True,
-).then( # Update the history gallery
-    fn=lambda x, history: update_history(x, history),
-    inputs=[result, history_gallery],
-    outputs=history_gallery,
-)
+#).then( # Update the history gallery
+#    fn=lambda x, history: update_history(x, history),
+#    inputs=[result, history_gallery],
+#    outputs=history_gallery,
+).success(save_image_history, [result, history_gallery, history_files, model_name], [history_gallery, history_files], queue=False, show_api=False)
 
 input_image.upload(preprocess_i2i_image, [input_image, input_image_preprocess, height, width], [input_image], queue=False, show_api=False)
 gr.on(
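The history wiring above reduces to a result `gr.Image` whose `.success()` event prepends the newly generated file to a `gr.Gallery`. A minimal standalone sketch of that pattern, not the app's code: `fake_generate` and `append_history` are illustrative stand-ins, and Gradio 4.x is assumed.

```python
# Minimal sketch of the result-to-history pattern used above (assumes Gradio 4.x).
import gradio as gr
from PIL import Image

def fake_generate(prompt):  # stand-in for the real generation pipeline
    path = "example.png"
    Image.new("RGB", (64, 64), "white").save(path)
    return path

def append_history(image_path, gallery):  # newest image goes first
    gallery = gallery or []
    gallery.insert(0, image_path)
    return gallery

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    run = gr.Button("Generate")
    result = gr.Image(label="Generated Image", type="filepath", interactive=False)
    with gr.Accordion("History", open=False):
        history = gr.Gallery(label="History", columns=6, object_fit="contain", interactive=False)
        clear = gr.Button("Clear History")
        clear.click(lambda: [], None, history, queue=False)
    run.click(fake_generate, prompt, result).success(append_history, [result, history], history)

if __name__ == "__main__":
    demo.launch()
```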
env.py
CHANGED
@@ -3,7 +3,7 @@ import os
 
 CIVITAI_API_KEY = os.environ.get("CIVITAI_API_KEY")
 HF_TOKEN = os.environ.get("HF_TOKEN")
-
+HF_READ_TOKEN = os.environ.get('HF_READ_TOKEN') # only use for private repo
 
 
 num_loras = 3
@@ -75,25 +75,25 @@ HF_MODEL_USER_LIKES = [] # sorted by number of likes
 HF_MODEL_USER_EX = [] # sorted by a special rule
 
 # - **Download Models**
-
+DOWNLOAD_MODEL_LIST = [
 ]
 
 # - **Download VAEs**
-
+DOWNLOAD_VAE_LIST = [
 ]
 
 # - **Download LoRAs**
-
+DOWNLOAD_LORA_LIST = [
 ]
 
 DIFFUSERS_FORMAT_LORAS = []
 
-
-os.makedirs(
-
-os.makedirs(
-
-os.makedirs(
+DIRECTORY_MODELS = 'models'
+os.makedirs(DIRECTORY_MODELS, exist_ok=True)
+DIRECTORY_LORAS = 'loras'
+os.makedirs(DIRECTORY_LORAS, exist_ok=True)
+DIRECTORY_VAES = 'vaes'
+os.makedirs(DIRECTORY_VAES, exist_ok=True)
 
 
 HF_LORA_PRIVATE_REPOS1 = []
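These new constants are consumed downstream (modutils.py imports `DIRECTORY_LORAS`, `HF_READ_TOKEN`, `HF_TOKEN`, and `CIVITAI_API_KEY`). A small sketch of the intended usage pattern; the caller below is hypothetical, only the imported names come from this commit.

```python
# Sketch of a downstream consumer (hypothetical; only the imported names are from this commit).
import os
from env import DIRECTORY_LORAS, HF_READ_TOKEN

def list_local_loras():
    # env.py creates the directory at import time via os.makedirs(..., exist_ok=True),
    # so listing it here is safe even on a fresh checkout.
    return sorted(os.listdir(DIRECTORY_LORAS))

if not HF_READ_TOKEN:
    print("HF_READ_TOKEN is unset; private repos will be skipped.")
print(list_local_loras())
```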
modutils.py
CHANGED
@@ -5,6 +5,7 @@ import os
 import re
 from pathlib import Path
 from PIL import Image
+import numpy as np
 import shutil
 import requests
 from requests.adapters import HTTPAdapter
@@ -12,11 +13,16 @@ from urllib3.util import Retry
 import urllib.parse
 import pandas as pd
 from huggingface_hub import HfApi, HfFolder, hf_hub_download, snapshot_download
+from translatepy import Translator
+from unidecode import unidecode
+import copy
+from datetime import datetime, timezone, timedelta
+FILENAME_TIMEZONE = timezone(timedelta(hours=9)) # JST
 
 
 from env import (HF_LORA_PRIVATE_REPOS1, HF_LORA_PRIVATE_REPOS2,
     HF_MODEL_USER_EX, HF_MODEL_USER_LIKES, DIFFUSERS_FORMAT_LORAS,
-
+    DIRECTORY_LORAS, HF_READ_TOKEN, HF_TOKEN, CIVITAI_API_KEY)
 
 
 MODEL_TYPE_DICT = {
@@ -46,7 +52,6 @@ def is_repo_name(s):
     return re.fullmatch(r'^[^/]+?/[^/]+?$', s)
 
 
-from translatepy import Translator
 translator = Translator()
 def translate_to_en(input: str):
     try:
@@ -64,6 +69,7 @@ def get_local_model_list(dir_path):
         if file.suffix in valid_extensions:
             file_path = str(Path(f"{dir_path}/{file.name}"))
             model_list.append(file_path)
+            #print('\033[34mFILE: ' + file_path + '\033[0m')
     return model_list
 
 
@@ -98,21 +104,81 @@ def split_hf_url(url: str):
         print(e)
 
 
-def download_hf_file(directory, url, progress=gr.Progress(track_tqdm=True)):
-    hf_token = get_token()
+def download_hf_file(directory, url, force_filename="", hf_token="", progress=gr.Progress(track_tqdm=True)):
     repo_id, filename, subfolder, repo_type = split_hf_url(url)
+    kwargs = {}
+    if subfolder is not None: kwargs["subfolder"] = subfolder
+    if force_filename: kwargs["force_filename"] = force_filename
     try:
-        print(f"
-
-        else: path = hf_hub_download(repo_id=repo_id, filename=filename, repo_type=repo_type, local_dir=directory, token=hf_token)
+        print(f"Start downloading: {url} to {directory}")
+        path = hf_hub_download(repo_id=repo_id, filename=filename, repo_type=repo_type, local_dir=directory, token=hf_token, **kwargs)
         return path
     except Exception as e:
-        print(f"
+        print(f"Download failed: {url} {e}")
         return None
 
 
+USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:127.0) Gecko/20100101 Firefox/127.0'
+
+
+def request_json_data(url):
+    model_version_id = url.split('/')[-1]
+    if "?modelVersionId=" in model_version_id:
+        match = re.search(r'modelVersionId=(\d+)', url)
+        model_version_id = match.group(1)
+
+    endpoint_url = f"https://civitai.com/api/v1/model-versions/{model_version_id}"
+
+    params = {}
+    headers = {'User-Agent': USER_AGENT, 'content-type': 'application/json'}
+    session = requests.Session()
+    retries = Retry(total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
+    session.mount("https://", HTTPAdapter(max_retries=retries))
+
+    try:
+        result = session.get(endpoint_url, params=params, headers=headers, stream=True, timeout=(3.0, 15))
+        result.raise_for_status()
+        json_data = result.json()
+        return json_data if json_data else None
+    except Exception as e:
+        print(f"Error: {e}")
+        return None
+
+
+class ModelInformation:
+    def __init__(self, json_data):
+        self.model_version_id = json_data.get("id", "")
+        self.model_id = json_data.get("modelId", "")
+        self.download_url = json_data.get("downloadUrl", "")
+        self.model_url = f"https://civitai.com/models/{self.model_id}?modelVersionId={self.model_version_id}"
+        self.filename_url = next(
+            (v.get("name", "") for v in json_data.get("files", []) if str(self.model_version_id) in v.get("downloadUrl", "")), ""
+        )
+        self.filename_url = self.filename_url if self.filename_url else ""
+        self.description = json_data.get("description", "")
+        if self.description is None: self.description = ""
+        self.model_name = json_data.get("model", {}).get("name", "")
+        self.model_type = json_data.get("model", {}).get("type", "")
+        self.nsfw = json_data.get("model", {}).get("nsfw", False)
+        self.poi = json_data.get("model", {}).get("poi", False)
+        self.images = [img.get("url", "") for img in json_data.get("images", [])]
+        self.example_prompt = json_data.get("trainedWords", [""])[0] if json_data.get("trainedWords") else ""
+        self.original_json = copy.deepcopy(json_data)
+
+
+def retrieve_model_info(url):
+    json_data = request_json_data(url)
+    if not json_data:
+        return None
+    model_descriptor = ModelInformation(json_data)
+    return model_descriptor
+
+
-def download_things(directory, url, hf_token="", civitai_api_key=""):
+def download_things(directory, url, hf_token="", civitai_api_key="", romanize=False):
+    hf_token = get_token()
     url = url.strip()
+    downloaded_file_path = None
+
     if "drive.google.com" in url:
         original_dir = os.getcwd()
         os.chdir(directory)
@@ -123,18 +189,48 @@ def download_things(directory, url, hf_token="", civitai_api_key=""):
         # url = urllib.parse.quote(url, safe=':/') # fix encoding
         if "/blob/" in url:
             url = url.replace("/blob/", "/resolve/")
-
+
+        filename = unidecode(url.split('/')[-1]) if romanize else url.split('/')[-1]
+
+        download_hf_file(directory, url, filename, hf_token)
+
+        downloaded_file_path = os.path.join(directory, filename)
+
     elif "civitai.com" in url:
-
-
-        if civitai_api_key:
-            url = url + f"?token={civitai_api_key}"
-            os.system(f"aria2c --console-log-level=error --summary-interval=10 -c -x 16 -k 1M -s 16 -d {directory} {url}")
-        else:
+
+        if not civitai_api_key:
             print("\033[91mYou need an API key to download Civitai models.\033[0m")
+
+        model_profile = retrieve_model_info(url)
+        if model_profile.download_url and model_profile.filename_url:
+            url = model_profile.download_url
+            filename = unidecode(model_profile.filename_url) if romanize else model_profile.filename_url
+        else:
+            if "?" in url:
+                url = url.split("?")[0]
+            filename = ""
+
+        url_dl = url + f"?token={civitai_api_key}"
+        print(f"Filename: {filename}")
+
+        param_filename = ""
+        if filename:
+            param_filename = f"-o '{filename}'"
+
+        aria2_command = (
+            f'aria2c --console-log-level=error --summary-interval=10 -c -x 16 '
+            f'-k 1M -s 16 -d "{directory}" {param_filename} "{url_dl}"'
+        )
+        os.system(aria2_command)
+
+        if param_filename and os.path.exists(os.path.join(directory, filename)):
+            downloaded_file_path = os.path.join(directory, filename)
+
     else:
         os.system(f"aria2c --console-log-level=error --summary-interval=10 -c -x 16 -k 1M -s 16 -d {directory} {url}")
 
+    return downloaded_file_path
+
 
 def get_download_file(temp_dir, url, civitai_key="", progress=gr.Progress(track_tqdm=True)):
     if not "http" in url and is_repo_name(url) and not Path(url).exists():
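Taken together, the two hunks above replace the old single-path logic with three cooperating helpers: `download_hf_file` (Hugging Face files via `hf_hub_download`), `request_json_data`/`ModelInformation`/`retrieve_model_info` (Civitai metadata), and `download_things`, which now returns the downloaded file path. A hedged usage sketch follows; the URLs and the `loras` directory are placeholders, and tokens are assumed to come from env/HF config.

```python
# Illustrative calls only; URLs are placeholders and tokens come from env/HF configuration.
from modutils import download_hf_file, retrieve_model_info, download_things

# Hugging Face file: /blob/ URLs are rewritten to /resolve/ and fetched with hf_hub_download.
path = download_hf_file("loras", "https://huggingface.co/some-user/some-repo/blob/main/foo.safetensors")
print(path)  # local file path on success, None on failure

# Civitai metadata: resolves a model-version id from the URL and queries the public API.
info = retrieve_model_info("https://civitai.com/models/0000?modelVersionId=0000")
if info:
    print(info.model_name, info.download_url)

# Generic downloader: dispatches on the URL host and returns the downloaded path (or None).
lora_path = download_things("loras", "https://huggingface.co/some-user/some-repo/blob/main/foo.safetensors")
```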
@@ -173,7 +269,7 @@ def to_lora_key(path: str):
 
 def to_lora_path(key: str):
     if Path(key).is_file(): return key
-    path = Path(f"{
+    path = Path(f"{DIRECTORY_LORAS}/{escape_lora_basename(key)}.safetensors")
     return str(path)
 
 
@@ -203,25 +299,21 @@ def save_images(images: list[Image.Image], metadatas: list[str]):
         raise Exception(f"Failed to save image file:") from e
 
 
-def save_gallery_images(images, progress=gr.Progress(track_tqdm=True)):
-    from datetime import datetime, timezone, timedelta
+def save_gallery_images(images, model_name="", progress=gr.Progress(track_tqdm=True)):
     progress(0, desc="Updating gallery...")
-
-
-    i = 1
-    if not images: return images, gr.update(visible=False)
+    basename = f"{model_name.split('/')[-1]}_{datetime.now(FILENAME_TIMEZONE).strftime('%Y%m%d_%H%M%S')}_"
+    if not images: return images, gr.update()
     output_images = []
    output_paths = []
-    for image in images:
-        filename = basename
-        i += 1
+    for i, image in enumerate(images):
+        filename = f"{basename}{str(i + 1)}.png"
         oldpath = Path(image[0])
         newpath = oldpath
         try:
             if oldpath.exists():
                 newpath = oldpath.resolve().rename(Path(filename).resolve())
         except Exception as e:
-
+            print(e)
         finally:
             output_paths.append(str(newpath))
             output_images.append((str(newpath), str(filename)))
@@ -229,10 +321,47 @@ def save_gallery_images(images, progress=gr.Progress(track_tqdm=True)):
     return gr.update(value=output_images), gr.update(value=output_paths, visible=True)
 
 
+def save_gallery_history(images, files, history_gallery, history_files, progress=gr.Progress(track_tqdm=True)):
+    if not images or not files: return gr.update(), gr.update()
+    if not history_gallery: history_gallery = []
+    if not history_files: history_files = []
+    output_gallery = images + history_gallery
+    output_files = files + history_files
+    return gr.update(value=output_gallery), gr.update(value=output_files, visible=True)
+
+
+def save_image_history(image, gallery, files, model_name: str, progress=gr.Progress(track_tqdm=True)):
+    if not gallery: gallery = []
+    if not files: files = []
+    try:
+        basename = f"{model_name.split('/')[-1]}_{datetime.now(FILENAME_TIMEZONE).strftime('%Y%m%d_%H%M%S')}"
+        if image is None or not isinstance(image, (str, Image.Image, np.ndarray, tuple)): return gr.update(), gr.update()
+        filename = f"{basename}.png"
+        if isinstance(image, tuple): image = image[0]
+        if isinstance(image, str): oldpath = image
+        elif isinstance(image, Image.Image):
+            oldpath = "temp.png"
+            image.save(oldpath)
+        elif isinstance(image, np.ndarray):
+            oldpath = "temp.png"
+            Image.fromarray(image).convert('RGBA').save(oldpath)
+        oldpath = Path(oldpath)
+        newpath = oldpath
+        if oldpath.exists():
+            shutil.copy(oldpath.resolve(), Path(filename).resolve())
+            newpath = Path(filename).resolve()
+        files.insert(0, str(newpath))
+        gallery.insert(0, (str(newpath), str(filename)))
+    except Exception as e:
+        print(e)
+    finally:
+        return gr.update(value=gallery), gr.update(value=files, visible=True)
+
+
 def download_private_repo(repo_id, dir_path, is_replace):
-    if not
+    if not HF_READ_TOKEN: return
     try:
-        snapshot_download(repo_id=repo_id, local_dir=dir_path, allow_patterns=['*.ckpt', '*.pt', '*.pth', '*.safetensors', '*.bin'],
+        snapshot_download(repo_id=repo_id, local_dir=dir_path, allow_patterns=['*.ckpt', '*.pt', '*.pth', '*.safetensors', '*.bin'], token=HF_READ_TOKEN)
     except Exception as e:
         print(f"Error: Failed to download {repo_id}.")
         print(e)
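The new `save_image_history` keeps two parallel, newest-first lists: plain file paths for the `gr.Files` component and `(path, caption)` tuples for the `gr.Gallery`, with filenames stamped in `FILENAME_TIMEZONE`. The bookkeeping in isolation, as a standalone sketch with no Gradio objects involved:

```python
# Standalone sketch of the newest-first history bookkeeping (no Gradio involved).
from datetime import datetime, timezone, timedelta

FILENAME_TIMEZONE = timezone(timedelta(hours=9))  # JST, as defined in this commit

def history_filename(model_name: str) -> str:
    stamp = datetime.now(FILENAME_TIMEZONE).strftime('%Y%m%d_%H%M%S')
    return f"{model_name.split('/')[-1]}_{stamp}.png"  # e.g. "FLUX.1-dev_<timestamp>.png"

gallery, files = [], []
new_file = history_filename("black-forest-labs/FLUX.1-dev")
files.insert(0, new_file)                # gr.Files gets plain paths, newest first
gallery.insert(0, (new_file, new_file))  # gr.Gallery accepts (image, caption) tuples
print(files[0], gallery[0])
```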
@@ -250,9 +379,9 @@ private_model_path_repo_dict = {} # {"local filepath": "huggingface repo_id", ..
 def get_private_model_list(repo_id, dir_path):
     global private_model_path_repo_dict
     api = HfApi()
-    if not
+    if not HF_READ_TOKEN: return []
     try:
-        files = api.list_repo_files(repo_id, token=
+        files = api.list_repo_files(repo_id, token=HF_READ_TOKEN)
     except Exception as e:
         print(f"Error: Failed to list {repo_id}.")
         print(e)
@@ -270,11 +399,11 @@ def get_private_model_list(repo_id, dir_path):
 def download_private_file(repo_id, path, is_replace):
     file = Path(path)
     newpath = Path(f'{file.parent.name}/{escape_lora_basename(file.stem)}{file.suffix}') if is_replace else file
-    if not
+    if not HF_READ_TOKEN or newpath.exists(): return
     filename = file.name
     dirname = file.parent.name
     try:
-        hf_hub_download(repo_id=repo_id, filename=filename, local_dir=dirname,
+        hf_hub_download(repo_id=repo_id, filename=filename, local_dir=dirname, token=HF_READ_TOKEN)
     except Exception as e:
         print(f"Error: Failed to download {filename}.")
         print(e)
@@ -404,9 +533,9 @@ def get_private_lora_model_lists():
     models1 = []
     models2 = []
     for repo in HF_LORA_PRIVATE_REPOS1:
-        models1.extend(get_private_model_list(repo,
+        models1.extend(get_private_model_list(repo, DIRECTORY_LORAS))
     for repo in HF_LORA_PRIVATE_REPOS2:
-        models2.extend(get_private_model_list(repo,
+        models2.extend(get_private_model_list(repo, DIRECTORY_LORAS))
     models = list_uniq(models1 + sorted(models2))
     private_lora_model_list = models.copy()
     return models
@@ -451,7 +580,7 @@ def get_civitai_info(path):
 
 
 def get_lora_model_list():
-    loras = list_uniq(get_private_lora_model_lists() + DIFFUSERS_FORMAT_LORAS + get_local_model_list(
+    loras = list_uniq(get_private_lora_model_lists() + DIFFUSERS_FORMAT_LORAS + get_local_model_list(DIRECTORY_LORAS))
     loras.insert(0, "None")
     loras.insert(0, "")
     return loras
@@ -503,14 +632,14 @@ def update_lora_dict(path):
 def download_lora(dl_urls: str):
     global loras_url_to_path_dict
     dl_path = ""
-    before = get_local_model_list(
+    before = get_local_model_list(DIRECTORY_LORAS)
     urls = []
     for url in [url.strip() for url in dl_urls.split(',')]:
-        local_path = f"{
+        local_path = f"{DIRECTORY_LORAS}/{url.split('/')[-1]}"
         if not Path(local_path).exists():
-            download_things(
+            download_things(DIRECTORY_LORAS, url, HF_TOKEN, CIVITAI_API_KEY)
             urls.append(url)
-    after = get_local_model_list(
+    after = get_local_model_list(DIRECTORY_LORAS)
     new_files = list_sub(after, before)
     i = 0
     for file in new_files:
@@ -761,12 +890,14 @@ def update_loras(prompt, prompt_syntax, lora1, lora1_wt, lora2, lora2_wt, lora3,
         gr.update(value=tag5, label=label5, visible=on5), gr.update(visible=on5), gr.update(value=md5, visible=on5)
 
 
-def get_my_lora(link_url):
-
+def get_my_lora(link_url, romanize):
+    l_name = ""
+    l_path = ""
+    before = get_local_model_list(DIRECTORY_LORAS)
     for url in [url.strip() for url in link_url.split(',')]:
-        if not Path(f"{
-            download_things(
-    after = get_local_model_list(
+        if not Path(f"{DIRECTORY_LORAS}/{url.split('/')[-1]}").exists():
+            l_name = download_things(DIRECTORY_LORAS, url, HF_TOKEN, CIVITAI_API_KEY, romanize)
+    after = get_local_model_list(DIRECTORY_LORAS)
     new_files = list_sub(after, before)
     for file in new_files:
         path = Path(file)
@@ -774,11 +905,16 @@ def get_my_lora(link_url):
             new_path = Path(f'{path.parent.name}/{escape_lora_basename(path.stem)}{path.suffix}')
             path.resolve().rename(new_path.resolve())
             update_lora_dict(str(new_path))
+            l_path = str(new_path)
     new_lora_model_list = get_lora_model_list()
     new_lora_tupled_list = get_all_lora_tupled_list()
-
+    msg_lora = "Downloaded"
+    if l_name:
+        msg_lora += f": <b>{l_name}</b>"
+    print(msg_lora)
+
     return gr.update(
-        choices=new_lora_tupled_list, value=
+        choices=new_lora_tupled_list, value=l_path
     ), gr.update(
         choices=new_lora_tupled_list
     ), gr.update(
@@ -787,6 +923,8 @@ def get_my_lora(link_url):
         choices=new_lora_tupled_list
     ), gr.update(
         choices=new_lora_tupled_list
+    ), gr.update(
+        value=msg_lora
     )
 
 
@@ -794,12 +932,12 @@ def upload_file_lora(files, progress=gr.Progress(track_tqdm=True)):
     progress(0, desc="Uploading...")
     file_paths = [file.name for file in files]
     progress(1, desc="Uploaded.")
-    return gr.update(value=file_paths, visible=True), gr.update(
+    return gr.update(value=file_paths, visible=True), gr.update()
 
 
 def move_file_lora(filepaths):
     for file in filepaths:
-        path = Path(shutil.move(Path(file).resolve(), Path(f"./{
+        path = Path(shutil.move(Path(file).resolve(), Path(f"./{DIRECTORY_LORAS}").resolve()))
         newpath = Path(f'{path.parent.name}/{escape_lora_basename(path.stem)}{path.suffix}')
         path.resolve().rename(newpath.resolve())
         update_lora_dict(str(newpath))
@@ -941,7 +1079,7 @@ def update_civitai_selection(evt: gr.SelectData):
         selected = civitai_last_choices[selected_index][1]
         return gr.update(value=selected)
     except Exception:
-        return gr.update(
+        return gr.update()
 
 
 def select_civitai_lora(search_result):
@@ -1425,3 +1563,78 @@ def get_model_pipeline(repo_id: str):
     else:
         return default
 
+
+EXAMPLES_GUI = [
+    [
+        "1girl, souryuu asuka langley, neon genesis evangelion, plugsuit, pilot suit, red bodysuit, sitting, crossing legs, black eye patch, cat hat, throne, symmetrical, looking down, from bottom, looking at viewer, outdoors, masterpiece, best quality, very aesthetic, absurdres",
+        "nsfw, lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract]",
+        1,
+        30,
+        7.5,
+        True,
+        -1,
+        "Euler a",
+        1152,
+        896,
+        "votepurchase/animagine-xl-3.1",
+    ],
+    [
+        "solo, princess Zelda OOT, score_9, score_8_up, score_8, medium breasts, cute, eyelashes, cute small face, long hair, crown braid, hairclip, pointy ears, soft curvy body, looking at viewer, smile, blush, white dress, medium body, (((holding the Master Sword))), standing, deep forest in the background",
+        "score_6, score_5, score_4, busty, ugly face, mutated hands, low res, blurry face, black and white,",
+        1,
+        30,
+        5.,
+        True,
+        -1,
+        "Euler a",
+        1024,
+        1024,
+        "votepurchase/ponyDiffusionV6XL",
+    ],
+    [
+        "1girl, oomuro sakurako, yuru yuri, official art, school uniform, anime artwork, anime style, vibrant, studio anime, highly detailed, masterpiece, best quality, very aesthetic, absurdres",
+        "photo, deformed, black and white, realism, disfigured, low contrast, lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract]",
+        1,
+        40,
+        7.0,
+        True,
+        -1,
+        "Euler a",
+        1024,
+        1024,
+        "Raelina/Rae-Diffusion-XL-V2",
+    ],
+    [
+        "1girl, akaza akari, yuru yuri, official art, anime artwork, anime style, vibrant, studio anime, highly detailed, masterpiece, best quality, very aesthetic, absurdres",
+        "photo, deformed, black and white, realism, disfigured, low contrast, lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract]",
+        1,
+        35,
+        7.0,
+        True,
+        -1,
+        "Euler a",
+        1024,
+        1024,
+        "Raelina/Raemu-XL-V4",
+    ],
+    [
+        "yoshida yuuko, machikado mazoku, 1girl, solo, demon horns,horns, school uniform, long hair, open mouth, skirt, demon girl, ahoge, shiny, shiny hair, anime artwork",
+        "nsfw, lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract]",
+        1,
+        50,
+        7.,
+        True,
+        -1,
+        "Euler a",
+        1024,
+        1024,
+        "cagliostrolab/animagine-xl-3.1",
+    ],
+]
+
+
+RESOURCES = (
+    """### Resources
+    - You can also try the image generator in Colab’s free tier, which provides free GPU [link](https://github.com/R3gm/SD_diffusers_interactive).
+    """
+)