[
{
"model": "google/gemma-2b",
"commit": "4b822560a1dfd5d63c985ecf9a3c0aae0a4feeee",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.021005280494689943,
"prefill.throughput.value": 333.24953702806175,
"decode.latency.mean": 2.5191395263671876,
"decode.throughput.value": 50.4140396634341,
"per_token.latency.mean": 0.019915351091166256,
"per_token.throughput.value": 50.212521758833795
}
},
{
"model": "google/gemma-2b",
"commit": "4b822560a1dfd5d63c985ecf9a3c0aae0a4feeee",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02270400047302246,
"prefill.throughput.value": 308.3157088689105,
"decode.latency.mean": 2.5880667724609374,
"decode.throughput.value": 49.07137688694114,
"per_token.latency.mean": 0.02046005420910982,
"per_token.throughput.value": 48.87572583042086
}
},
{
"model": "google/gemma-2b",
"commit": "4b822560a1dfd5d63c985ecf9a3c0aae0a4feeee",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014068719863891601,
"prefill.throughput.value": 497.55770729119513,
"decode.latency.mean": 1.5730482177734375,
"decode.throughput.value": 80.73496957376263,
"per_token.latency.mean": 0.012439329396123472,
"per_token.throughput.value": 80.39018568891944
}
}
]