benchmark_results/2024-08-23/summaries.json
[
    {
        "model": "google/gemma-2b",
        "commit": "d806fa3e92289876e01ab19c9e19e9264ea1c1a1",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.020006303787231446,
            "prefill.throughput.value": 349.8897184830106,
            "decode.latency.mean": 2.476673583984375,
            "decode.throughput.value": 51.27845704870296,
            "per_token.latency.mean": 0.019500064234095296,
            "per_token.throughput.value": 51.28188235665034
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "d806fa3e92289876e01ab19c9e19e9264ea1c1a1",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.021566495895385743,
            "prefill.throughput.value": 324.5775314615521,
            "decode.latency.mean": 2.60439697265625,
            "decode.throughput.value": 48.763687461390134,
            "per_token.latency.mean": 0.020506019329461528,
            "per_token.throughput.value": 48.76616879821595
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "d806fa3e92289876e01ab19c9e19e9264ea1c1a1",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014093887805938721,
            "prefill.throughput.value": 496.669201315085,
            "decode.latency.mean": 1.5648489379882813,
            "decode.throughput.value": 81.15799353978989,
            "per_token.latency.mean": 0.012320554320267805,
            "per_token.throughput.value": 81.16517926104672
        }
    }
]
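
A minimal sketch of how one might consume this file, assuming it is saved locally as "summaries.json"; the file path, the baseline-selection logic, and the printed report format are assumptions for illustration, not part of the benchmark tooling itself.

import json

# Load the per-config benchmark summaries (assumed saved as "summaries.json").
with open("summaries.json") as f:
    runs = json.load(f)

# Pick the uncompiled, default-cache run as the baseline for comparison.
# This config string matches the first entry in the file above.
baseline = next(
    r for r in runs
    if r["config"] == "backend.cache_implementation=null,backend.torch_compile=False"
)
base_decode_tps = baseline["metrics"]["decode.throughput.value"]

# Report decode throughput and prefill latency for each config,
# plus the speedup relative to the baseline decode throughput.
for run in runs:
    m = run["metrics"]
    speedup = m["decode.throughput.value"] / base_decode_tps
    print(
        f'{run["config"]}: '
        f'decode {m["decode.throughput.value"]:.1f} tok/s ({speedup:.2f}x vs baseline), '
        f'prefill latency {m["prefill.latency.mean"] * 1000:.1f} ms'
    )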