benchmark_results/2024-08-27/summaries.json
[
  {
    "model": "google/gemma-2b",
    "commit": "9578c2597e2d88b6f0b304b5a05864fd613ddcc1",
    "config": "backend.cache_implementation=null,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.020687775611877443,
      "prefill.throughput.value": 338.3640721615861,
      "decode.latency.mean": 2.504463623046875,
      "decode.throughput.value": 50.7094608327729,
      "per_token.latency.mean": 0.019718889867226908,
      "per_token.throughput.value": 50.71279401291322
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "9578c2597e2d88b6f0b304b5a05864fd613ddcc1",
    "config": "backend.cache_implementation=static,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.02237489604949951,
      "prefill.throughput.value": 312.8506154627064,
      "decode.latency.mean": 2.6394550781249997,
      "decode.throughput.value": 48.1159922184459,
      "per_token.latency.mean": 0.02078207992193267,
      "per_token.throughput.value": 48.118379091817246
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "9578c2597e2d88b6f0b304b5a05864fd613ddcc1",
    "config": "backend.cache_implementation=static,backend.torch_compile=True",
    "metrics": {
      "prefill.latency.mean": 0.0141245436668396,
      "prefill.throughput.value": 495.5912321920887,
      "decode.latency.mean": 1.5631802978515625,
      "decode.throughput.value": 81.24462685113738,
      "per_token.latency.mean": 0.012307403571962371,
      "per_token.throughput.value": 81.25190615168505
    }
  }
]
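
Each throughput value is the inverse of the corresponding mean latency (for example, 1 / 0.0123 s per token is roughly 81.3 tokens/s), so the torch.compile speedup can be read off either metric. The following is a minimal Python sketch, assuming the file is saved at benchmark_results/2024-08-27/summaries.json (the path and variable names are illustrative, not part of the benchmark tooling), that loads the summaries and computes the per-token speedup of the compiled static-cache run over the eager static-cache run.

# Minimal sketch: load summaries.json and derive the torch.compile speedup.
# The file path below is an assumption for illustration.
import json

with open("benchmark_results/2024-08-27/summaries.json") as f:
    summaries = json.load(f)

# Index each entry by its config string for easy lookup.
by_config = {entry["config"]: entry["metrics"] for entry in summaries}

eager = by_config["backend.cache_implementation=static,backend.torch_compile=False"]
compiled = by_config["backend.cache_implementation=static,backend.torch_compile=True"]

# Throughput is roughly 1 / per-token latency, so the ratio of latencies
# gives the same speedup as the ratio of throughputs.
speedup = eager["per_token.latency.mean"] / compiled["per_token.latency.mean"]
print(f"torch.compile per-token speedup: {speedup:.2f}x")  # ~1.69x for these numbers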