benchmark_results/2024-09-10/summaries.json
[
    {
        "model": "google/gemma-2b",
        "commit": "f745e7d3f902601686b83c7cce2660c2a94509f0",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.021567856788635254,
            "prefill.throughput.value": 324.5570511989169,
            "decode.latency.mean": 2.60877099609375,
            "decode.throughput.value": 48.68192730989565,
            "per_token.latency.mean": 0.02054022249086635,
            "per_token.throughput.value": 48.684964364172366
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "f745e7d3f902601686b83c7cce2660c2a94509f0",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02229771137237549,
            "prefill.throughput.value": 313.9335639922338,
            "decode.latency.mean": 2.6286884765625,
            "decode.throughput.value": 48.313066052648495,
            "per_token.latency.mean": 0.020697333951634683,
            "per_token.throughput.value": 48.31540150711148
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "f745e7d3f902601686b83c7cce2660c2a94509f0",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.01422929573059082,
            "prefill.throughput.value": 491.9428292540906,
            "decode.latency.mean": 1.564611572265625,
            "decode.throughput.value": 81.17030594123659,
            "per_token.latency.mean": 0.01231867967064925,
            "per_token.throughput.value": 81.17753093155116
        }
    }
]
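
A minimal sketch of how a summaries file like the one above could be loaded and compared, assuming it has been saved locally as summaries.json; grouping by the backend.torch_compile flag in the config strings is an illustrative choice, not part of the benchmark tooling itself.

```python
import json

# Load the benchmark summaries (assumed to be saved locally as summaries.json).
with open("summaries.json") as f:
    summaries = json.load(f)

# Index decode throughput (tokens/s) by config string.
decode_tps = {
    entry["config"]: entry["metrics"]["decode.throughput.value"]
    for entry in summaries
}

# Compare the static-cache runs with and without torch.compile.
eager = decode_tps["backend.cache_implementation=static,backend.torch_compile=False"]
compiled = decode_tps["backend.cache_implementation=static,backend.torch_compile=True"]
print(f"decode throughput: {eager:.1f} -> {compiled:.1f} tokens/s "
      f"({compiled / eager:.2f}x with torch.compile)")
```

With the values above this prints roughly 48.3 -> 81.2 tokens/s, about a 1.68x decode speedup for the compiled static-cache run.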