Update app.py
app.py CHANGED

@@ -18,7 +18,7 @@ import datetime
 import psutil
 import subprocess
 from gpustat import GPUStatCollection
-import
+import cpuinfo
 
 try:
     import spaces
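
A note on the new import: the cpuinfo module is provided by the py-cpuinfo distribution (the PyPI name differs from the import name), so the Space's requirements presumably need py-cpuinfo for this line to resolve. A minimal sketch of the lookup the rest of this commit relies on; the printed example value is illustrative only:

# Sketch: verify the new import resolves and exposes the CPU brand string
# that this commit reads later via "brand_raw".
# Assumes the Space installs py-cpuinfo (imported as cpuinfo).
import cpuinfo

info = cpuinfo.get_cpu_info()  # plain dict of CPU facts
print(info.get("brand_raw", "Unknown CPU"))  # e.g. "Intel(R) Xeon(R) CPU @ 2.20GHz"
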
@@ -74,18 +74,17 @@ def update_gpu_status():
         return gpu_status
 
     except Exception as e:
-        print(f"Error getting GPU stats: {e}")
         return torch_update_gpu_status()
 
 def torch_update_gpu_status():
     if torch.cuda.is_available():
         gpu_info = torch.cuda.get_device_name(0)
         gpu_memory = torch.cuda.mem_get_info(0)
-        total_memory = gpu_memory[1] / (1024 * 1024)
-        free_memory=gpu_memory[0] /(1024 *1024)
-        used_memory = (gpu_memory[1] - gpu_memory[0]) / (1024 * 1024)
+        total_memory = gpu_memory[1] / (1024 * 1024 * 1024)
+        free_memory=gpu_memory[0] /(1024 *1024 * 1024)
+        used_memory = (gpu_memory[1] - gpu_memory[0]) / (1024 * 1024 * 1024)
 
-        gpu_status = f"**GPU Name**: {gpu_info}\n**Free Memory**: {free_memory}
+        gpu_status = f"**GPU Name**: {gpu_info}\n**Free Memory**: {free_memory}GB\n**Total Memory**: {total_memory:.2f} GB\n**Used Memory**: {used_memory:.2f} GB\n"
     else:
         gpu_status = "No GPU available"
     return gpu_status
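
The divisor change above switches the reported units from MB (1024 * 1024) to GB (1024 * 1024 * 1024), since torch.cuda.mem_get_info returns raw byte counts as (free, total). A minimal sketch of that conversion in isolation, assuming torch with CUDA support is installed; report_gpu_memory is an illustrative name rather than the Space's function, and it rounds all three figures, whereas the committed line leaves free_memory unrounded:

# Sketch (illustrative, not part of the commit): torch.cuda.mem_get_info(0)
# returns (free_bytes, total_bytes) for device 0, so index 0 is free and
# index 1 is total, matching the indexing in torch_update_gpu_status().
import torch

def report_gpu_memory(device: int = 0) -> str:
    if not torch.cuda.is_available():
        return "No GPU available"
    free_bytes, total_bytes = torch.cuda.mem_get_info(device)
    gib = 1024 ** 3                      # same divisor as the new code, written as a power
    free = free_bytes / gib
    total = total_bytes / gib
    used = (total_bytes - free_bytes) / gib
    name = torch.cuda.get_device_name(device)
    return (f"**GPU Name**: {name}\n"
            f"**Free Memory**: {free:.2f} GB\n"   # the committed line prints free unrounded
            f"**Total Memory**: {total:.2f} GB\n"
            f"**Used Memory**: {used:.2f} GB\n")
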
@@ -97,15 +96,15 @@ def update_cpu_status():
     cpu_percent = psutil.cpu_percent()
     cpu_freq = psutil.cpu_freq()
     cpu_count = psutil.cpu_count(logical=True)
-    cpu_name =
+    cpu_name = cpuinfo.get_cpu_info().get("brand_raw", "Unknown CPU")
     virtual_memory = psutil.virtual_memory()
 
     cpu_status = f"**{time_str} (UTC+0)**\n\n"
     cpu_status += f"**CPU Name**: {cpu_name}\n"
     cpu_status += f"**CPU Usage**: {cpu_percent}%\n"
-    cpu_status += f"**CPU Frequency**: Current
+    cpu_status += f"**CPU Frequency**: *Current*: {cpu_freq.current:.2f}MHz, *Max*: {cpu_freq.max:.2f}MHz, *Min*: {cpu_freq.min:.2f}MHz\n"
     cpu_status += f"**CPU Cores**: {cpu_count}\n"
-    cpu_status += f"**Virtual Memory**: Total
+    cpu_status += f"**Virtual Memory**: *Total*: {virtual_memory.total / (1024 * 1024 * 1024)}GB, *Available*: {virtual_memory.available / (1024 * 1024 * 1024)}GB, *Used*: {virtual_memory.used / (1024 * 1024 * 1024)}GB, *Percentage*: {virtual_memory.percent}%\n\n"
 
     return cpu_status
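
For reference, a hedged sketch of the psutil values formatted above, kept as a standalone function: psutil.cpu_freq() exposes current/min/max in MHz and psutil.virtual_memory() exposes byte counts plus a percent field. build_cpu_status and the sample time string are illustrative, and the two-decimal rounding of the memory figures is an addition of this sketch (the committed lines print the raw division results):

# Sketch (illustrative): mirrors the fields used by update_cpu_status()
# without touching the Space's own code.
import cpuinfo
import psutil

def build_cpu_status(time_str: str) -> str:
    cpu_percent = psutil.cpu_percent()
    cpu_freq = psutil.cpu_freq()          # namedtuple with .current/.min/.max in MHz
    cpu_count = psutil.cpu_count(logical=True)
    cpu_name = cpuinfo.get_cpu_info().get("brand_raw", "Unknown CPU")
    vm = psutil.virtual_memory()          # byte counts plus .percent
    gib = 1024 ** 3

    status = f"**{time_str} (UTC+0)**\n\n"
    status += f"**CPU Name**: {cpu_name}\n"
    status += f"**CPU Usage**: {cpu_percent}%\n"
    status += (f"**CPU Frequency**: *Current*: {cpu_freq.current:.2f}MHz, "
               f"*Max*: {cpu_freq.max:.2f}MHz, *Min*: {cpu_freq.min:.2f}MHz\n")
    status += f"**CPU Cores**: {cpu_count}\n"
    status += (f"**Virtual Memory**: *Total*: {vm.total / gib:.2f}GB, "
               f"*Available*: {vm.available / gib:.2f}GB, "
               f"*Used*: {vm.used / gib:.2f}GB, *Percentage*: {vm.percent}%\n\n")
    return status

print(build_cpu_status("2024-01-01 00:00:00"))  # sample timestamp for illustration
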