unclemusclez committed
Commit 5e25d21 • 1 Parent(s): 230d1b0
Update app.py
app.py
CHANGED
@@ -46,21 +46,22 @@ def process_model(model_id, q_method, use_imatrix, imatrix_q_method, private_rep
     )
 
     dl_pattern += pattern
-    ... (contents of the 15 removed lines are not visible in this rendering)
+    if not os.path.isfile(fp16):
+        api.snapshot_download(repo_id=model_id, local_dir=model_name, local_dir_use_symlinks=False, allow_patterns=dl_pattern)
+        print("Model downloaded successfully!")
+        print(f"Current working directory: {os.getcwd()}")
+        print(f"Model directory contents: {os.listdir(model_name)}")
+
+        conversion_script = "convert_hf_to_gguf.py"
+        fp16_conversion = f"python llama.cpp/{conversion_script} {model_name} --outtype f16 --outfile {fp16}"
+        result = subprocess.run(fp16_conversion, shell=True, capture_output=True)
+        print(result)
+        if result.returncode != 0:
+            raise Exception(f"Error converting to fp16: {result.stderr}")
+        print("Model converted to fp16 successfully!")
+        print(f"Converted model path: {fp16}")
+
+    HfApi().delete_repo(repo_id=model_id)
 
     ### Ollamafy ###
     if ollama_model:
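For reference, here is a minimal standalone sketch of the download-and-convert flow this hunk adds, assuming the huggingface_hub API and llama.cpp checkout layout the Space uses. The model id, output path, and download pattern list below are placeholders (the real values come from earlier code and user input), and the converter is invoked with an argument list rather than shell=True, which is a deliberate departure from the committed code.

import os
import subprocess
from huggingface_hub import HfApi

# Placeholder values; the Space derives these from user input and earlier code.
model_id = "some-user/some-model"        # hypothetical repo id
model_name = model_id.split("/")[-1]     # local download directory
fp16 = f"{model_name}.fp16.gguf"         # hypothetical output path
dl_pattern = ["*.safetensors", "*.json", "*.model", "*.md"]  # hypothetical allow-list

api = HfApi()

if not os.path.isfile(fp16):
    # Download only the files matched by dl_pattern into model_name/.
    api.snapshot_download(repo_id=model_id, local_dir=model_name, allow_patterns=dl_pattern)

    # Convert the downloaded checkpoint to an fp16 GGUF with llama.cpp's converter.
    result = subprocess.run(
        ["python", "llama.cpp/convert_hf_to_gguf.py", model_name,
         "--outtype", "f16", "--outfile", fp16],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        raise Exception(f"Error converting to fp16: {result.stderr}")
    print(f"Converted model path: {fp16}")

Note that the commit also adds HfApi().delete_repo(repo_id=model_id) after the conversion, which permanently deletes the source repository the caller's token controls; that call is intentionally left out of the sketch above.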
@@ -157,8 +158,7 @@ with gr.Blocks(css=css) as demo:
         search_type="model",
     )
 
-    ollama_q_method
-    latest = gr.Dropdown(
+    ollama_q_method = gr.Dropdown(
         ollama_q_methods,
         label="Ollama Lastest Quantization Method",
         info="Chose which quantization will be labled with the latest tag in the Ollama Library",
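The second hunk joins a broken two-line assignment into a single gr.Dropdown call. Below is a minimal, self-contained sketch of that widget, assuming a recent Gradio release; the choices list and default value are placeholders rather than the Space's actual ollama_q_methods, and the label/info strings silently correct the typos ("Lastest", "Chose", "labled") present in the committed code.

import gradio as gr

# Placeholder list; the Space defines its own ollama_q_methods elsewhere in app.py.
ollama_q_methods = ["Q4_K_M", "Q5_K_M", "Q6_K", "Q8_0"]

with gr.Blocks() as demo:
    ollama_q_method = gr.Dropdown(
        ollama_q_methods,  # choices shown to the user
        label="Ollama Latest Quantization Method",
        info="Choose which quantization will be labeled with the latest tag in the Ollama Library",
        value="Q4_K_M",
    )

if __name__ == "__main__":
    demo.launch()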