{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 21.553375244140625,
"base_token_generation_latency_sync": 21.522614669799804,
"base_token_generation_latency_async": 21.35719507932663,
"base_token_generation_throughput_sync": 0.04646275628412306,
"base_token_generation_throughput_async": 0.046822627984888406,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 40.58705940246582,
"base_inference_latency_async": 18.87080669403076,
"base_inference_throughput_sync": 0.02463839496436261,
"base_inference_throughput_async": 0.05299190523298197,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 23.815746307373047,
"smashed_token_generation_latency_sync": 25.672776222229004,
"smashed_token_generation_latency_async": 24.77431520819664,
"smashed_token_generation_throughput_sync": 0.03895176709148195,
"smashed_token_generation_throughput_async": 0.04036438511402921,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 51.47955169677734,
"smashed_inference_latency_async": 26.007390022277832,
"smashed_inference_throughput_sync": 0.019425188585366036,
"smashed_inference_throughput_async": 0.0384506095822535,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}