benchmark_results/2024-08-28/summaries.json
[
  {
    "model": "google/gemma-2b",
    "commit": "c35d2ccf5a5ebcab24a0b9da3cb2527b3ae565a0",
    "config": "backend.cache_implementation=null,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.02051244831085205,
      "prefill.throughput.value": 341.25619204103833,
      "decode.latency.mean": 2.427037475585937,
      "decode.throughput.value": 52.32716893641684,
      "per_token.latency.mean": 0.019109263142262856,
      "per_token.throughput.value": 52.33064156138798
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "c35d2ccf5a5ebcab24a0b9da3cb2527b3ae565a0",
    "config": "backend.cache_implementation=static,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.021654767990112303,
      "prefill.throughput.value": 323.25444461913617,
      "decode.latency.mean": 2.5708712158203126,
      "decode.throughput.value": 49.399596221888885,
      "per_token.latency.mean": 0.02024205705687756,
      "per_token.throughput.value": 49.402093729413444
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "c35d2ccf5a5ebcab24a0b9da3cb2527b3ae565a0",
    "config": "backend.cache_implementation=static,backend.torch_compile=True",
    "metrics": {
      "prefill.latency.mean": 0.014148575782775879,
      "prefill.throughput.value": 494.749443864281,
      "decode.latency.mean": 1.5675462036132815,
      "decode.throughput.value": 81.0183455564231,
      "per_token.latency.mean": 0.012341764037064678,
      "per_token.throughput.value": 81.0256943008154
    }
  }
]
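
The three entries benchmark google/gemma-2b at the same transformers commit under three backend configurations: dynamic cache without torch.compile, static cache without torch.compile, and static cache with torch.compile. Below is a minimal sketch of how one might load and compare these summaries; the local path "summaries.json" and the printed speedup derivation are assumptions for illustration, while the config strings and metric keys are taken from the JSON above.

```python
# Sketch (not part of the benchmark tooling): load summaries.json and compare
# decode throughput across the three cache/compile configurations.
import json

with open("summaries.json") as f:  # assumed local copy of the file above
    summaries = json.load(f)

# Treat the dynamic-cache, eager-mode run as the baseline.
baseline = next(
    s for s in summaries
    if s["config"] == "backend.cache_implementation=null,backend.torch_compile=False"
)
baseline_tps = baseline["metrics"]["decode.throughput.value"]

for s in summaries:
    tps = s["metrics"]["decode.throughput.value"]
    print(f'{s["config"]}: decode {tps:.1f} tok/s ({tps / baseline_tps:.2f}x vs. baseline)')
```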