benchmark_results / 2024-06-13 / summaries.json
hf-transformers-bot's picture
Upload folder using huggingface_hub
148f5d8 verified
raw
history blame
1.73 kB
[
{
"model": "google/gemma-2b",
"commit": "348e2294ac1a3cc744a93f04ab1f886439ee90d3",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.020395471572875974,
"prefill.throughput.value": 343.213442012752,
"decode.latency.mean": 2.4265584716796873,
"decode.throughput.value": 52.33749834682095,
"per_token.latency.mean": 0.019179596934865113,
"per_token.throughput.value": 52.13873906714781
}
},
{
"model": "google/gemma-2b",
"commit": "348e2294ac1a3cc744a93f04ab1f886439ee90d3",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02268723201751709,
"prefill.throughput.value": 308.54358938962736,
"decode.latency.mean": 2.5942015380859376,
"decode.throughput.value": 48.955332935969025,
"per_token.latency.mean": 0.020515317818864065,
"per_token.throughput.value": 48.74406571856707
}
},
{
"model": "google/gemma-2b",
"commit": "348e2294ac1a3cc744a93f04ab1f886439ee90d3",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014029391765594482,
"prefill.throughput.value": 498.95249323400594,
"decode.latency.mean": 1.5590690307617188,
"decode.throughput.value": 81.4588690392697,
"per_token.latency.mean": 0.012328717148822287,
"per_token.throughput.value": 81.11143989506856
}
}
]