{
  "google/gemma-2b": {
    "backend.cache_implementation=null,backend.torch_compile=False": {
      "25245ec26dc29bcf6102e1b4ddd0dfd02e720cf5": {
        "metrics": {
          "prefill.latency.mean": 0.022692079544067385,
          "prefill.throughput.value": 308.47767770275067,
          "decode.latency.mean": 2.385437255859375,
          "decode.throughput.value": 53.23971514574468,
          "per_token.latency.mean": 0.018857490215376905,
          "per_token.throughput.value": 53.029326202941526
        }
      }
    },
    "backend.cache_implementation=static,backend.torch_compile=False": {
      "25245ec26dc29bcf6102e1b4ddd0dfd02e720cf5": {
        "metrics": {
          "prefill.latency.mean": 0.021704048156738282,
          "prefill.throughput.value": 322.52047864291,
          "decode.latency.mean": 2.5922081298828124,
          "decode.throughput.value": 48.99297958985314,
          "per_token.latency.mean": 0.020496448750552454,
          "per_token.throughput.value": 48.78893959486744
        }
      }
    },
    "backend.cache_implementation=static,backend.torch_compile=True": {
      "25245ec26dc29bcf6102e1b4ddd0dfd02e720cf5": {
        "metrics": {
          "prefill.latency.mean": 0.01405294418334961,
          "prefill.throughput.value": 498.1162600997043,
          "decode.latency.mean": 1.5584067993164064,
          "decode.throughput.value": 81.49348427875726,
          "per_token.latency.mean": 0.012323451479433083,
          "per_token.throughput.value": 81.14609788247432
        }
      }
    }
  }
}