{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 9.883513450622559,
"base_token_generation_latency_sync": 38.06065979003906,
"base_token_generation_latency_async": 38.11613507568836,
"base_token_generation_throughput_sync": 0.02627384826002707,
"base_token_generation_throughput_async": 0.02623560856876674,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.0922233581543,
"base_inference_latency_async": 38.748931884765625,
"base_inference_throughput_sync": 0.008396853898617971,
"base_inference_throughput_async": 0.02580716296835929,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 10.680657386779785,
"smashed_token_generation_latency_sync": 167.25687408447266,
"smashed_token_generation_latency_async": 168.01605839282274,
"smashed_token_generation_throughput_sync": 0.005978827509923165,
"smashed_token_generation_throughput_async": 0.005951812044429663,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 264.7478271484375,
"smashed_inference_latency_async": 196.02723121643066,
"smashed_inference_throughput_sync": 0.0037771792530683357,
"smashed_inference_throughput_async": 0.005101332063890222,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}