{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 56.57164268493652,
"base_token_generation_latency_async": 57.186380587518215,
"base_token_generation_throughput_sync": 0.017676700773376564,
"base_token_generation_throughput_async": 0.017486681089557626,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 54.740274810791014,
"base_inference_latency_async": 54.187893867492676,
"base_inference_throughput_sync": 0.01826808512482785,
"base_inference_throughput_async": 0.018454306462718974,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 172.90437774658204,
"smashed_token_generation_latency_async": 173.57398755848408,
"smashed_token_generation_throughput_sync": 0.00578354355761688,
"smashed_token_generation_throughput_async": 0.0057612319338060936,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 180.40770568847657,
"smashed_inference_latency_async": 157.52835273742676,
"smashed_inference_throughput_sync": 0.005543000484285159,
"smashed_inference_throughput_async": 0.006348063587428173,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}