[
{
"model": "google/gemma-2b",
"commit": "25245ec26dc29bcf6102e1b4ddd0dfd02e720cf5",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.022692079544067385,
"prefill.throughput.value": 308.47767770275067,
"decode.latency.mean": 2.385437255859375,
"decode.throughput.value": 53.23971514574468,
"per_token.latency.mean": 0.018857490215376905,
"per_token.throughput.value": 53.029326202941526
}
},
{
"model": "google/gemma-2b",
"commit": "25245ec26dc29bcf6102e1b4ddd0dfd02e720cf5",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.021704048156738282,
"prefill.throughput.value": 322.52047864291,
"decode.latency.mean": 2.5922081298828124,
"decode.throughput.value": 48.99297958985314,
"per_token.latency.mean": 0.020496448750552454,
"per_token.throughput.value": 48.78893959486744
}
},
{
"model": "google/gemma-2b",
"commit": "25245ec26dc29bcf6102e1b4ddd0dfd02e720cf5",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.01405294418334961,
"prefill.throughput.value": 498.1162600997043,
"decode.latency.mean": 1.5584067993164064,
"decode.throughput.value": 81.49348427875726,
"per_token.latency.mean": 0.012323451479433083,
"per_token.throughput.value": 81.14609788247432
}
}
]