[
    {
        "model": "google/gemma-2b",
        "commit": "22b41b3f8a5cdb37e686d18d8d9a24eb98a331ec",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02068830394744873,
            "prefill.throughput.value": 338.35543105809967,
            "decode.latency.mean": 2.4355318603515625,
            "decode.throughput.value": 52.1446678926499,
            "per_token.latency.mean": 0.019248261545957785,
            "per_token.throughput.value": 51.952743764020816
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "22b41b3f8a5cdb37e686d18d8d9a24eb98a331ec",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.025930047988891602,
            "prefill.throughput.value": 269.95707848280074,
            "decode.latency.mean": 2.602952880859375,
            "decode.throughput.value": 48.79074105946569,
            "per_token.latency.mean": 0.020575859446770588,
            "per_token.throughput.value": 48.60064302961359
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "22b41b3f8a5cdb37e686d18d8d9a24eb98a331ec",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014229311943054199,
            "prefill.throughput.value": 491.9422687487664,
            "decode.latency.mean": 1.5740656127929689,
            "decode.throughput.value": 80.68278664359835,
            "per_token.latency.mean": 0.01244737162231928,
            "per_token.throughput.value": 80.33824572304952
        }
    }
]