{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 49.486734008789064,
"base_token_generation_latency_async": 48.929936066269875,
"base_token_generation_throughput_sync": 0.020207435791224283,
"base_token_generation_throughput_async": 0.020437386197390835,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 48.96522331237793,
"base_inference_latency_async": 46.851134300231934,
"base_inference_throughput_sync": 0.020422657803895072,
"base_inference_throughput_async": 0.021344200411281176,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 177.7585906982422,
"smashed_token_generation_latency_async": 178.23677975684404,
"smashed_token_generation_throughput_sync": 0.00562560715671723,
"smashed_token_generation_throughput_async": 0.005610514290957399,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 185.27344818115233,
"smashed_inference_latency_async": 132.53157138824463,
"smashed_inference_throughput_sync": 0.005397427477153895,
"smashed_inference_throughput_async": 0.0075453719406265085,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}