[
{
"model": "google/gemma-2b",
"commit": "2b789f27f383435b8db2fee3d10b0a1358c0c234",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02078020763397217,
"prefill.throughput.value": 336.8590017626277,
"decode.latency.mean": 2.473873046875,
"decode.throughput.value": 51.336506600622286,
"per_token.latency.mean": 0.019477971662686566,
"per_token.throughput.value": 51.340047994610934
}
},
{
"model": "google/gemma-2b",
"commit": "2b789f27f383435b8db2fee3d10b0a1358c0c234",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.022303232192993162,
"prefill.throughput.value": 313.85585458771027,
"decode.latency.mean": 2.6093214111328127,
"decode.throughput.value": 48.671658254957606,
"per_token.latency.mean": 0.020544749890725446,
"per_token.throughput.value": 48.67423576917974
}
},
{
"model": "google/gemma-2b",
"commit": "2b789f27f383435b8db2fee3d10b0a1358c0c234",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014169424057006837,
"prefill.throughput.value": 494.02149105266363,
"decode.latency.mean": 1.5656774291992188,
"decode.throughput.value": 81.11504811368164,
"per_token.latency.mean": 0.012327069214948519,
"per_token.throughput.value": 81.12228320964905
}
}
]