{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 11.945297241210938,
"base_token_generation_latency_sync": 56.39304389953613,
"base_token_generation_latency_async": 49.49269890785217,
"base_token_generation_throughput_sync": 0.01773268351645451,
"base_token_generation_throughput_async": 0.020205000375143146,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 122.70581817626953,
"base_inference_latency_async": 39.78419303894043,
"base_inference_throughput_sync": 0.008149572814579,
"base_inference_throughput_async": 0.02513561099558331,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 12.72653579711914,
"smashed_token_generation_latency_sync": 167.0605026245117,
"smashed_token_generation_latency_async": 167.2410013154149,
"smashed_token_generation_throughput_sync": 0.005985855329596479,
"smashed_token_generation_throughput_async": 0.005979394957783168,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 266.14876251220704,
"smashed_inference_latency_async": 195.19712924957275,
"smashed_inference_throughput_sync": 0.0037572971993590784,
"smashed_inference_throughput_async": 0.005123026162548898,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}