{
"os": "Linux-5.15.133+-x86_64-with-glibc2.31",
"python": "3.10.13",
"heartbeatAt": "2024-05-22T11:23:00.864653",
"startedAt": "2024-05-22T11:22:59.891144",
"docker": null,
"cuda": null,
"args": [],
"state": "running",
"program": "kaggle.ipynb",
"codePathLocal": null,
"root": "/kaggle/working",
"host": "2c1b614ec68f",
"username": "root",
"executable": "/opt/conda/bin/python3.10",
"cpu_count": 2,
"cpu_count_logical": 4,
"cpu_freq": {
"current": 2000.144,
"min": 0.0,
"max": 0.0
},
"cpu_freq_per_core": [
{
"current": 2000.144,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.144,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.144,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.144,
"min": 0.0,
"max": 0.0
}
],
"disk": {
"/": {
"total": 8062.387607574463,
"used": 5656.321590423584
}
},
"gpu": "Tesla P100-PCIE-16GB",
"gpu_count": 1,
"gpu_devices": [
{
"name": "Tesla P100-PCIE-16GB",
"memory_total": 17179869184
}
],
"memory": {
"total": 31.357563018798828
}
}