{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 82.928955078125,
"base_token_generation_latency_async": 82.9247709363699,
"base_token_generation_throughput_sync": 0.012058514412221022,
"base_token_generation_throughput_async": 0.012059122849640709,
"base_token_generation_CO2_emissions": 2.0673200656993337e-05,
"base_token_generation_energy_consumption": 0.00675816312091373,
"base_inference_latency_sync": 81.18558807373047,
"base_inference_latency_async": 80.05075454711914,
"base_inference_throughput_sync": 0.012317457121722492,
"base_inference_throughput_async": 0.012492074630119622,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 52.46407852172852,
"smashed_token_generation_latency_async": 52.498188242316246,
"smashed_token_generation_throughput_sync": 0.019060660706846116,
"smashed_token_generation_throughput_async": 0.019048276397354767,
"smashed_token_generation_CO2_emissions": 1.7540604124074345e-05,
"smashed_token_generation_energy_consumption": 0.004500894375549395,
"smashed_inference_latency_sync": 56.9942024230957,
"smashed_inference_latency_async": 54.01430130004883,
"smashed_inference_throughput_sync": 0.017545644249506526,
"smashed_inference_throughput_async": 0.018513615393171735
}