YaTharThShaRma999 committed on
Commit
053eae3
1 Parent(s): 064551e

Create tester.py

Browse files
Files changed (1) hide show
  1. tester.py +40 -0
tester.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import subprocess
2
+ import re
3
+ import sys
4
+
5
def install_llama_cpp_python(cuda=None):
    """Install ``llama-cpp-python`` from abetlen's prebuilt CUDA wheel index.

    Parameters
    ----------
    cuda : str | None
        Explicit wheel-index suffix such as ``"cu121"``. When ``None``,
        the CUDA version is auto-detected by parsing ``nvcc --version``
        output and mapped to a known suffix.

    Notes
    -----
    Prints progress and error messages instead of raising — intended to
    be called from a script, not a library.
    """
    try:
        if cuda is None:
            # Only probe nvcc when no explicit suffix was supplied;
            # otherwise an explicit `cuda` would still fail whenever
            # nvcc is missing from PATH.
            result = subprocess.run(
                ['nvcc', '--version'],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
            )
            version_match = re.search(r"release (\d+\.\d+),", result.stdout)
            if not version_match:
                raise RuntimeError("CUDA version not found in nvcc output.")

            cuda_version = version_match.group(1)
            # Map "major.minor" to the wheel-index suffix published upstream.
            version_map = {
                "12.1": "cu121",
                "12.2": "cu122",
                "12.3": "cu123",
                "12.4": "cu124",
            }
            major_minor = '.'.join(cuda_version.split('.')[:2])
            cuda_suffix = version_map.get(major_minor)
            if not cuda_suffix:
                raise ValueError(f"No suitable wheel found for CUDA version {cuda_version}.")
        else:
            cuda_suffix = cuda

        extra_index_url = f"https://abetlen.github.io/llama-cpp-python/whl/{cuda_suffix}"
        # check=True: a failed pip install raises CalledProcessError instead
        # of silently falling through to the success message below.
        subprocess.run(
            [sys.executable, '-m', 'pip', 'install', 'llama-cpp-python',
             '--extra-index-url', extra_index_url],
            check=True,
        )
        print(f"Successfully installed llama-cpp-python with CUDA {cuda_suffix}")

    except FileNotFoundError:
        print("Error: nvcc (CUDA) is not installed or not in PATH.")
    except Exception as e:
        # Script-level boundary: report and return rather than crash.
        print(f"An error occurred: {e}")
39
# Run the installer only when executed as a script — importing this
# module must not trigger a pip install as a side effect.
if __name__ == "__main__":
    install_llama_cpp_python()