[
  {
    "model": "google/gemma-2b",
    "commit": "74b92c62560b7ade42d35a49f9063adc8b805c4a",
    "config": "backend.cache_implementation=null,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.019320960044860842,
      "prefill.throughput.value": 362.30083721237867,
      "decode.latency.mean": 2.3275152587890626,
      "decode.throughput.value": 54.564626169658,
      "per_token.latency.mean": 0.01840009012335374,
      "per_token.throughput.value": 54.34755989215407
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "74b92c62560b7ade42d35a49f9063adc8b805c4a",
    "config": "backend.cache_implementation=static,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.022983008384704587,
      "prefill.throughput.value": 304.57283410550235,
      "decode.latency.mean": 2.6945513916015624,
      "decode.throughput.value": 47.132149862064765,
      "per_token.latency.mean": 0.021307865519768633,
      "per_token.throughput.value": 46.93102643585946
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "74b92c62560b7ade42d35a49f9063adc8b805c4a",
    "config": "backend.cache_implementation=static,backend.torch_compile=True",
    "metrics": {
      "prefill.latency.mean": 0.014070799827575683,
      "prefill.throughput.value": 497.4841576725109,
      "decode.latency.mean": 1.559213195800781,
      "decode.throughput.value": 81.4513373424699,
      "per_token.latency.mean": 0.012329813972292209,
      "per_token.throughput.value": 81.10422446333894
    }
  }
]