{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.711685180664062,
"base_token_generation_latency_sync": 38.78611068725586,
"base_token_generation_latency_async": 38.81423454731703,
"base_token_generation_throughput_sync": 0.025782425262055854,
"base_token_generation_throughput_async": 0.02576374393731599,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 118.54264373779297,
"base_inference_latency_async": 39.09869194030762,
"base_inference_throughput_sync": 0.008435782841252652,
"base_inference_throughput_async": 0.02557630320540417,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 210114.78125,
"smashed_token_generation_latency_sync": 168.86386260986328,
"smashed_token_generation_latency_async": 169.0822370350361,
"smashed_token_generation_throughput_sync": 0.0059219301545314195,
"smashed_token_generation_throughput_async": 0.0059142818165623555,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 265.30672760009764,
"smashed_inference_latency_async": 212.2807502746582,
"smashed_inference_throughput_sync": 0.003769222171807572,
"smashed_inference_throughput_async": 0.004710742724934578,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}