{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 47.90380516052246,
"base_token_generation_latency_async": 48.11076000332832,
"base_token_generation_throughput_sync": 0.020875168405705278,
"base_token_generation_throughput_async": 0.02078537108810627,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 48.23736343383789,
"base_inference_latency_async": 45.93331813812256,
"base_inference_throughput_sync": 0.02073081795549615,
"base_inference_throughput_async": 0.021770689350004646,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 35.530513763427734,
"smashed_token_generation_latency_async": 35.968757420778275,
"smashed_token_generation_throughput_sync": 0.02814482240978232,
"smashed_token_generation_throughput_async": 0.02780190564554572,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 60.0002555847168,
"smashed_inference_latency_async": 34.74392890930176,
"smashed_inference_throughput_sync": 0.016666595671214424,
"smashed_inference_throughput_async": 0.028782006853930578,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}