Spaces:
Running
on
T4
Running
on
T4
liuyizhang
committed on
Commit
•
fb4a881
1
Parent(s):
867ce75
update app.py
Browse files- app.py +3 -0
- requirements.txt +1 -0
- utils.py +99 -0
app.py
CHANGED
@@ -57,6 +57,8 @@ from io import BytesIO
|
|
57 |
from diffusers import StableDiffusionInpaintPipeline
|
58 |
from huggingface_hub import hf_hub_download
|
59 |
|
|
|
|
|
60 |
def load_model_hf(model_config_path, repo_id, filename, device='cpu'):
|
61 |
args = SLConfig.fromfile(model_config_path)
|
62 |
model = build_model(args)
|
@@ -721,4 +723,5 @@ if __name__ == "__main__":
|
|
721 |
<a href="https://huggingface.co/spaces/yizhangliu/Grounded-Segment-Anything?duplicate=true"><img style="display: inline; margin-top: 0em; margin-bottom: 0em" src="https://bit.ly/3gLdBN6" alt="Duplicate Space" /></a></p>'
|
722 |
gr.Markdown(DESCRIPTION)
|
723 |
|
|
|
724 |
block.launch(server_name='0.0.0.0', debug=args.debug, share=args.share)
|
|
|
57 |
from diffusers import StableDiffusionInpaintPipeline
|
58 |
from huggingface_hub import hf_hub_download
|
59 |
|
60 |
+
from utils import computer_info
|
61 |
+
|
62 |
def load_model_hf(model_config_path, repo_id, filename, device='cpu'):
|
63 |
args = SLConfig.fromfile(model_config_path)
|
64 |
model = build_model(args)
|
|
|
723 |
<a href="https://huggingface.co/spaces/yizhangliu/Grounded-Segment-Anything?duplicate=true"><img style="display: inline; margin-top: 0em; margin-bottom: 0em" src="https://bit.ly/3gLdBN6" alt="Duplicate Space" /></a></p>'
|
724 |
gr.Markdown(DESCRIPTION)
|
725 |
|
726 |
+
computer_info()
|
727 |
block.launch(server_name='0.0.0.0', debug=args.debug, share=args.share)
|
requirements.txt
CHANGED
@@ -22,6 +22,7 @@ yapf
|
|
22 |
numba
|
23 |
scipy
|
24 |
safetensors
|
|
|
25 |
|
26 |
lama-cleaner==1.1.2
|
27 |
openmim==0.1.5
|
|
|
22 |
numba
|
23 |
scipy
|
24 |
safetensors
|
25 |
+
pynvml
|
26 |
|
27 |
lama-cleaner==1.1.2
|
28 |
openmim==0.1.5
|
utils.py
ADDED
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import psutil
|
2 |
+
import platform
|
3 |
+
import getpass
|
4 |
+
import datetime
|
5 |
+
from pynvml import *
|
6 |
+
from pprint import pprint
|
7 |
+
|
8 |
+
|
9 |
+
def physical_system_time():
    """Return the host's boot time as a formatted 'YYYY-MM-DD HH:MM:SS' string."""
    boot_ts = psutil.boot_time()
    stamp = datetime.datetime.fromtimestamp(boot_ts).strftime("%Y-%m-%d %H:%M:%S")
    return {"system_time": stamp}
|
11 |
+
|
12 |
+
def physical_username():
    """Return the login name of the user running this process."""
    current_user = getpass.getuser()
    return {"system_user": current_user}
|
16 |
+
|
17 |
+
def physical_platfrom_system():
    """Return the OS name and version string.

    NOTE(review): 'platfrom' is a typo, but the name is part of the public
    API (called from computer_info), so it is kept unchanged.
    """
    info = platform.uname()
    return {"system_name": info.system, "system_version": info.version}
|
20 |
+
|
21 |
+
def physical_cpu():
    """Report how many physical (non-hyperthreaded) CPU cores the host exposes."""
    core_count = psutil.cpu_count(logical=False)
    return {"system_cpu_count": core_count}
|
23 |
+
|
24 |
+
def physical_memory():
    """Return total physical RAM in bytes.

    The original wrapped the byte count in ``round(..., 2)``, which is a
    no-op on an integer — dropped here with identical results.  (Divide by
    ``1024 ** 3`` if a GiB figure is ever wanted, as the old commented-out
    line did.)
    """
    return {"system_memory": psutil.virtual_memory().total}
|
27 |
+
|
28 |
+
def physical_hard_disk():
    """Return device/fstype/opts/total-size info for each mounted partition.

    Bug fix: ``psutil.disk_usage`` expects a *mount-point path*, not a device
    node — the original passed ``partition.device`` (e.g. ``/dev/sda1``),
    which raises OSError or reports the wrong filesystem.  Partitions whose
    usage cannot be read (no medium, permission denied) are now skipped
    instead of aborting the whole scan.
    """
    disks = []
    for partition in psutil.disk_partitions():
        try:
            usage = psutil.disk_usage(partition.mountpoint)
        except OSError:
            # Unreadable mount (e.g. empty CD-ROM drive): skip, keep scanning.
            continue
        disks.append(
            {
                "device": partition.device,
                "fstype": partition.fstype,
                "opts": partition.opts,
                "total": usage.total,
            }
        )
    return {"system_hard_disk": disks}
|
41 |
+
|
42 |
+
def nvidia_info():
    """Query NVIDIA driver and GPU state via NVML (pynvml).

    Returns a dict:
      state          -- False if any NVML call failed (e.g. no NVIDIA driver)
      nvidia_version -- driver version string ("" on failure)
      nvidia_count   -- number of GPUs detected
      gpus           -- per-GPU name, total/free/used memory (bytes),
                        temperature string, and power state
    """
    nvidia_dict = {
        "state": True,
        "nvidia_version": "",
        "nvidia_count": 0,
        "gpus": []
    }
    try:
        nvmlInit()
        nvidia_dict["nvidia_version"] = nvmlSystemGetDriverVersion()
        nvidia_dict["nvidia_count"] = nvmlDeviceGetCount()
        for i in range(nvidia_dict["nvidia_count"]):
            handle = nvmlDeviceGetHandleByIndex(i)
            memory_info = nvmlDeviceGetMemoryInfo(handle)
            gpu = {
                "gpu_name": nvmlDeviceGetName(handle),
                "total": memory_info.total,
                "free": memory_info.free,
                "used": memory_info.used,
                # NVML_TEMPERATURE_GPU is the named constant (value 0) the
                # original passed as a magic number.
                "temperature": f"{nvmlDeviceGetTemperature(handle, NVML_TEMPERATURE_GPU)}℃",
                "powerStatus": nvmlDeviceGetPowerState(handle)
            }
            nvidia_dict['gpus'].append(gpu)
    except NVMLError as e1:
        nvidia_dict["state"] = False
        print(f'nvidia_error_1_{e1}')
    except Exception as e2:
        nvidia_dict["state"] = False
        # Fixed typo in the original log tag ('erro' -> 'error').
        print(f'nvidia_error_2_{e2}')
    finally:
        try:
            nvmlShutdown()
        except NVMLError:
            # Shutdown raises when nvmlInit never succeeded; best-effort only.
            pass
    return nvidia_dict
|
77 |
+
|
78 |
+
def merge(info_list):
    """Call each zero-argument collector in *info_list* and union the
    returned dicts; later collectors win on key collisions."""
    combined = {}
    for collector in info_list:
        combined.update(collector())
    return combined
|
85 |
+
|
86 |
+
def computer_info():
    """Collect and pretty-print a snapshot of host system/hardware info.

    Aggregates boot time, user, OS, CPU count, RAM, disks, and NVIDIA GPU
    state into one dict and pprints it.  Also *returns* that dict (the
    original returned None), so callers can consume the data; existing
    callers that ignore the return value are unaffected.
    """
    data = merge(
        [
            physical_system_time,
            physical_username,
            physical_platfrom_system,
            physical_cpu,
            physical_memory,
            physical_hard_disk,
            nvidia_info,
        ]
    )
    pprint(data)
    return data
|
99 |
+
|