{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.891793251037598,
"base_token_generation_latency_sync": 36.54602737426758,
"base_token_generation_latency_async": 36.249685660004616,
"base_token_generation_throughput_sync": 0.02736275518427784,
"base_token_generation_throughput_async": 0.027586446110988778,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.13349075317383,
"base_inference_latency_async": 39.10982608795166,
"base_inference_throughput_sync": 0.00839394525987529,
"base_inference_throughput_async": 0.025569021906442696,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.47966194152832,
"smashed_token_generation_latency_sync": 167.53070373535155,
"smashed_token_generation_latency_async": 167.87611786276102,
"smashed_token_generation_throughput_sync": 0.005969055090819061,
"smashed_token_generation_throughput_async": 0.005956773439432889,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 265.20934295654297,
"smashed_inference_latency_async": 196.3787078857422,
"smashed_inference_throughput_sync": 0.003770606226960335,
"smashed_inference_throughput_async": 0.005092201750211249,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}