[
{
"model": "google/gemma-2b",
"commit": "eb5b968c5d80271ecb29917dffecc8f4c00247a8",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.020699952125549315,
"prefill.throughput.value": 338.1650333074981,
"decode.latency.mean": 2.4684188232421875,
"decode.throughput.value": 51.44993985793288,
"per_token.latency.mean": 0.019435116857994265,
"per_token.throughput.value": 51.45325378317286
}
},
{
"model": "google/gemma-2b",
"commit": "eb5b968c5d80271ecb29917dffecc8f4c00247a8",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02248211193084717,
"prefill.throughput.value": 311.35864911318527,
"decode.latency.mean": 2.6452998046875003,
"decode.throughput.value": 48.009681086035926,
"per_token.latency.mean": 0.020828123963723973,
"per_token.throughput.value": 48.01200538952451
}
},
{
"model": "google/gemma-2b",
"commit": "eb5b968c5d80271ecb29917dffecc8f4c00247a8",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.01416041612625122,
"prefill.throughput.value": 494.3357552200096,
"decode.latency.mean": 1.5644852905273439,
"decode.throughput.value": 81.17685782599585,
"per_token.latency.mean": 0.012317679724355383,
"per_token.throughput.value": 81.18412090409606
}
}
]