{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 54.78649673461914,
"base_token_generation_latency_async": 54.92506679147482,
"base_token_generation_throughput_sync": 0.01825267282271962,
"base_token_generation_throughput_async": 0.018206623285439767,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 53.236223983764646,
"base_inference_latency_async": 52.28421688079834,
"base_inference_throughput_sync": 0.01878420228123182,
"base_inference_throughput_async": 0.01912623081416479,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 167.07427520751952,
"smashed_token_generation_latency_async": 167.65917092561722,
"smashed_token_generation_throughput_sync": 0.005985361892235776,
"smashed_token_generation_throughput_async": 0.00596448136108018,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 173.8080261230469,
"smashed_inference_latency_async": 141.2287473678589,
"smashed_inference_throughput_sync": 0.0057534742342223765,
"smashed_inference_throughput_async": 0.0070807113894120815,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}