benchmark_results/2024-06-08/summaries.json
[
  {
    "model": "google/gemma-2b",
    "commit": "25245ec26dc29bcf6102e1b4ddd0dfd02e720cf5",
    "config": "backend.cache_implementation=null,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.019655680656433104,
      "prefill.throughput.value": 356.1311420527669,
      "decode.latency.mean": 2.3992926025390626,
      "decode.throughput.value": 52.9322683967773,
      "per_token.latency.mean": 0.018967608666702693,
      "per_token.throughput.value": 52.72145886030866
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "25245ec26dc29bcf6102e1b4ddd0dfd02e720cf5",
    "config": "backend.cache_implementation=static,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.02248379135131836,
      "prefill.throughput.value": 311.3353922664626,
      "decode.latency.mean": 2.6058619384765622,
      "decode.throughput.value": 48.736273447489964,
      "per_token.latency.mean": 0.020600977675245685,
      "per_token.throughput.value": 48.54138554800769
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "25245ec26dc29bcf6102e1b4ddd0dfd02e720cf5",
    "config": "backend.cache_implementation=static,backend.torch_compile=True",
    "metrics": {
      "prefill.latency.mean": 0.014054895877838135,
      "prefill.throughput.value": 498.0470905542354,
      "decode.latency.mean": 1.5597666625976563,
      "decode.throughput.value": 81.42243519200012,
      "per_token.latency.mean": 0.012334230019641018,
      "per_token.throughput.value": 81.07518656678211
    }
  }
]
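
Each entry records the model, the transformers commit that was benchmarked, the backend configuration (cache implementation and whether torch.compile was enabled), and mean latency/throughput for the prefill and decode phases. Below is a minimal sketch of how the file could be consumed to compare configurations; the local path "summaries.json" and the speedup calculation are assumptions for illustration, not part of the benchmark tooling.

```python
# Minimal sketch (illustrative, not part of the benchmark artifacts):
# load summaries.json and compare decode throughput across configurations.
import json

with open("summaries.json") as f:  # assumed local path
    summaries = json.load(f)

# Index runs by their config string for easy lookup.
runs = {entry["config"]: entry["metrics"] for entry in summaries}

baseline = runs["backend.cache_implementation=null,backend.torch_compile=False"]
compiled = runs["backend.cache_implementation=static,backend.torch_compile=True"]

# Decode speedup of static cache + torch.compile over the eager baseline,
# measured on decode throughput (tokens/s).
speedup = compiled["decode.throughput.value"] / baseline["decode.throughput.value"]
print(f"decode throughput speedup: {speedup:.2f}x")  # ~1.54x for this run
```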