Spaces:
Running
on
CPU Upgrade
test: load duplicate bug
Browse files
app.py
CHANGED
@@ -132,6 +132,9 @@ def create_vc_fn(model_name, tgt_sr, net_g, vc, if_f0, version, file_index):
|
|
132 |
def load_model():
|
133 |
categories = []
|
134 |
if os.path.isfile("weights/folder_info.json"):
|
|
|
|
|
|
|
135 |
with open("weights/folder_info.json", "r", encoding="utf-8") as f:
|
136 |
folder_info = json.load(f)
|
137 |
for category_name, category_info in folder_info.items():
|
@@ -140,7 +143,11 @@ def load_model():
|
|
140 |
category_title = category_info['title']
|
141 |
category_folder = category_info['folder_path']
|
142 |
description = category_info['description']
|
|
|
143 |
models = []
|
|
|
|
|
|
|
144 |
with open(f"weights/{category_folder}/model_info.json", "r", encoding="utf-8") as f:
|
145 |
models_info = json.load(f)
|
146 |
for character_name, info in models_info.items():
|
@@ -176,9 +183,57 @@ def load_model():
|
|
176 |
else:
|
177 |
net_g = net_g.float()
|
178 |
vc = VC(tgt_sr, config)
|
179 |
-
print(f"Model loaded: {character_name} / {info['feature_retrieval_library']} | ({model_version})")
|
|
|
180 |
models.append((character_name, model_title, model_author, model_cover, model_version, create_vc_fn(model_name, tgt_sr, net_g, vc, if_f0, version, model_index)))
|
181 |
-
|
182 |
else:
|
183 |
categories = []
|
184 |
return categories
|
@@ -386,7 +441,7 @@ if __name__ == '__main__':
|
|
386 |
"## No model found, please add the model into weights folder\n\n"+
|
387 |
"</div>"
|
388 |
)
|
389 |
-
for (folder_title,
|
390 |
with gr.TabItem(folder_title):
|
391 |
if description:
|
392 |
gr.Markdown(f"### <center> {description}")
|
@@ -676,4 +731,4 @@ if __name__ == '__main__':
|
|
676 |
tts_voice
|
677 |
]
|
678 |
)
|
679 |
-
app.queue(concurrency_count=5, max_size=50, api_open=config.api).launch(share=config.
|
|
|
132 |
def load_model():
|
133 |
categories = []
|
134 |
if os.path.isfile("weights/folder_info.json"):
|
135 |
+
for _, w_dirs, _ in os.walk(f"weights"):
|
136 |
+
category_count_total = len(w_dirs)
|
137 |
+
category_count = 1
|
138 |
with open("weights/folder_info.json", "r", encoding="utf-8") as f:
|
139 |
folder_info = json.load(f)
|
140 |
for category_name, category_info in folder_info.items():
|
|
|
143 |
category_title = category_info['title']
|
144 |
category_folder = category_info['folder_path']
|
145 |
description = category_info['description']
|
146 |
+
print(f"Load {category_title} [{category_count}/{category_count_total}]")
|
147 |
models = []
|
148 |
+
for _, m_dirs, _ in os.walk(f"weights/{category_folder}"):
|
149 |
+
model_count_total = len(m_dirs)
|
150 |
+
model_count = 1
|
151 |
with open(f"weights/{category_folder}/model_info.json", "r", encoding="utf-8") as f:
|
152 |
models_info = json.load(f)
|
153 |
for character_name, info in models_info.items():
|
|
|
183 |
else:
|
184 |
net_g = net_g.float()
|
185 |
vc = VC(tgt_sr, config)
|
186 |
+
print(f"Model loaded [{model_count}/{model_count_total}]: {character_name} / {info['feature_retrieval_library']} | ({model_version})")
|
187 |
+
model_count += 1
|
188 |
models.append((character_name, model_title, model_author, model_cover, model_version, create_vc_fn(model_name, tgt_sr, net_g, vc, if_f0, version, model_index)))
|
189 |
+
category_count += 1
|
190 |
+
categories.append([category_title, description, models])
|
191 |
+
elif os.path.exists("weights"):
|
192 |
+
models = []
|
193 |
+
for w_root, w_dirs, _ in os.walk("weights"):
|
194 |
+
model_count = 1
|
195 |
+
for sub_dir in w_dirs:
|
196 |
+
pth_files = glob.glob(f"weights/{sub_dir}/*.pth")
|
197 |
+
index_files = glob.glob(f"weights/{sub_dir}/*.index")
|
198 |
+
if pth_files == []:
|
199 |
+
print(f"Model [{model_count}/{len(w_dirs)}]: No Model file detected, skipping...")
|
200 |
+
continue
|
201 |
+
cpt = torch.load(pth_files[0])
|
202 |
+
tgt_sr = cpt["config"][-1]
|
203 |
+
cpt["config"][-3] = cpt["weight"]["emb_g.weight"].shape[0] # n_spk
|
204 |
+
if_f0 = cpt.get("f0", 1)
|
205 |
+
version = cpt.get("version", "v1")
|
206 |
+
if version == "v1":
|
207 |
+
if if_f0 == 1:
|
208 |
+
net_g = SynthesizerTrnMs256NSFsid(*cpt["config"], is_half=config.is_half)
|
209 |
+
else:
|
210 |
+
net_g = SynthesizerTrnMs256NSFsid_nono(*cpt["config"])
|
211 |
+
model_version = "V1"
|
212 |
+
elif version == "v2":
|
213 |
+
if if_f0 == 1:
|
214 |
+
net_g = SynthesizerTrnMs768NSFsid(*cpt["config"], is_half=config.is_half)
|
215 |
+
else:
|
216 |
+
net_g = SynthesizerTrnMs768NSFsid_nono(*cpt["config"])
|
217 |
+
model_version = "V2"
|
218 |
+
del net_g.enc_q
|
219 |
+
print(net_g.load_state_dict(cpt["weight"], strict=False))
|
220 |
+
net_g.eval().to(config.device)
|
221 |
+
if config.is_half:
|
222 |
+
net_g = net_g.half()
|
223 |
+
else:
|
224 |
+
net_g = net_g.float()
|
225 |
+
vc = VC(tgt_sr, config)
|
226 |
+
if index_files == []:
|
227 |
+
print("Warning: No Index file detected!")
|
228 |
+
index_info = "None"
|
229 |
+
model_index = ""
|
230 |
+
else:
|
231 |
+
index_info = index_files[0]
|
232 |
+
model_index = index_files[0]
|
233 |
+
print(f"Model loaded [{model_count}/{len(w_dirs)}]: {index_files[0]} / {index_info} | ({model_version})")
|
234 |
+
model_count += 1
|
235 |
+
models.append((index_files[0][:-4], index_files[0][:-4], "", "", model_version, create_vc_fn(index_files[0], tgt_sr, net_g, vc, if_f0, version, model_index)))
|
236 |
+
categories.append(["Models", "", models])
|
237 |
else:
|
238 |
categories = []
|
239 |
return categories
|
|
|
441 |
"## No model found, please add the model into weights folder\n\n"+
|
442 |
"</div>"
|
443 |
)
|
444 |
+
for (folder_title, description, models) in categories:
|
445 |
with gr.TabItem(folder_title):
|
446 |
if description:
|
447 |
gr.Markdown(f"### <center> {description}")
|
|
|
731 |
tts_voice
|
732 |
]
|
733 |
)
|
734 |
+
app.queue(concurrency_count=5, max_size=50, api_open=config.api).launch(share=config.share)
|
config.py
CHANGED
@@ -11,7 +11,7 @@ class Config:
|
|
11 |
self.gpu_name = None
|
12 |
self.gpu_mem = None
|
13 |
(
|
14 |
-
self.
|
15 |
self.api,
|
16 |
self.unsupported
|
17 |
) = self.arg_parse()
|
@@ -20,13 +20,13 @@ class Config:
|
|
20 |
@staticmethod
|
21 |
def arg_parse() -> tuple:
|
22 |
parser = argparse.ArgumentParser()
|
23 |
-
parser.add_argument("--
|
24 |
parser.add_argument("--api", action="store_true", help="Launch with api")
|
25 |
parser.add_argument("--unsupported", action="store_true", help="Enable unsupported feature")
|
26 |
cmd_opts = parser.parse_args()
|
27 |
|
28 |
return (
|
29 |
-
cmd_opts.
|
30 |
cmd_opts.api,
|
31 |
cmd_opts.unsupported
|
32 |
)
|
|
|
11 |
self.gpu_name = None
|
12 |
self.gpu_mem = None
|
13 |
(
|
14 |
+
self.share,
|
15 |
self.api,
|
16 |
self.unsupported
|
17 |
) = self.arg_parse()
|
|
|
20 |
@staticmethod
|
21 |
def arg_parse() -> tuple:
|
22 |
parser = argparse.ArgumentParser()
|
23 |
+
parser.add_argument("--share", action="store_true", help="Launch with public link")
|
24 |
parser.add_argument("--api", action="store_true", help="Launch with api")
|
25 |
parser.add_argument("--unsupported", action="store_true", help="Enable unsupported feature")
|
26 |
cmd_opts = parser.parse_args()
|
27 |
|
28 |
return (
|
29 |
+
cmd_opts.share,
|
30 |
cmd_opts.api,
|
31 |
cmd_opts.unsupported
|
32 |
)
|