{
"os": "Linux-5.15.133+-x86_64-with-glibc2.31",
"python": "3.10.13",
"heartbeatAt": "2024-04-29T15:31:18.145699",
"startedAt": "2024-04-29T15:31:17.258763",
"docker": null,
"cuda": null,
"args": [],
"state": "running",
"program": "kaggle.ipynb",
"codePathLocal": null,
"root": "/kaggle/working",
"host": "521511c36b08",
"username": "root",
"executable": "/opt/conda/bin/python3.10",
"cpu_count": 2,
"cpu_count_logical": 4,
"cpu_freq": {
"current": 2000.2,
"min": 0.0,
"max": 0.0
},
"cpu_freq_per_core": [
{
"current": 2000.2,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.2,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.2,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.2,
"min": 0.0,
"max": 0.0
}
],
"disk": {
"/": {
"total": 8062.387607574463,
"used": 5605.14933013916
}
},
"gpu": "Tesla T4",
"gpu_count": 2,
"gpu_devices": [
{
"name": "Tesla T4",
"memory_total": 16106127360
},
{
"name": "Tesla T4",
"memory_total": 16106127360
}
],
"memory": {
"total": 31.35755157470703
}
}