benchmark_results/2024-06-07/summaries.json
[
    {
        "model": "google/gemma-2b",
        "commit": "bdf36dcd48106a4a0278ed7f3cc26cd65ab7b066",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.020219680786132812,
            "prefill.throughput.value": 346.19735464868387,
            "decode.latency.mean": 2.454503173828125,
            "decode.throughput.value": 51.74163201505523,
            "per_token.latency.mean": 0.019405411166164715,
            "per_token.throughput.value": 51.5320181282013
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "bdf36dcd48106a4a0278ed7f3cc26cd65ab7b066",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02271752071380615,
            "prefill.throughput.value": 308.1322160188845,
            "decode.latency.mean": 2.6328499755859376,
            "decode.throughput.value": 48.23670212038432,
            "per_token.latency.mean": 0.02081991943540309,
            "per_token.throughput.value": 48.03092553276439
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "bdf36dcd48106a4a0278ed7f3cc26cd65ab7b066",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014084112167358397,
            "prefill.throughput.value": 497.01393434108905,
            "decode.latency.mean": 1.5616133422851561,
            "decode.throughput.value": 81.32614941298917,
            "per_token.latency.mean": 0.012348921915288028,
            "per_token.throughput.value": 80.97872890118407
        }
    }
]
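
A minimal consumption sketch, not part of the benchmark harness itself: it assumes a local copy of this file saved as "summaries.json" and uses only the Python standard library to read the entries above and compare decode throughput across the three configurations, taking the dynamic-cache eager run as the baseline.

    import json

    # Assumed: this summaries.json has been downloaded locally.
    with open("summaries.json") as f:
        summaries = json.load(f)

    # Use the dynamic-cache, eager (torch_compile=False) run as the baseline.
    baseline = next(
        e["metrics"]["decode.throughput.value"]
        for e in summaries
        if e["config"] == "backend.cache_implementation=null,backend.torch_compile=False"
    )

    for entry in summaries:
        tps = entry["metrics"]["decode.throughput.value"]
        print(f"{entry['model']} | {entry['config']}: "
              f"{tps:.2f} tokens/s ({tps / baseline:.2f}x vs baseline)")

For this run, the static-cache plus torch.compile configuration decodes at roughly 81.3 tokens/s versus 51.7 tokens/s for the eager baseline, about a 1.57x speedup.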