[
    {
        "model": "google/gemma-2b",
        "commit": "eed9ed679878ada2f6d2eefccdbda368cabc88b1",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02022262477874756,
            "prefill.throughput.value": 346.14695553054355,
            "decode.latency.mean": 2.4749503173828122,
            "decode.throughput.value": 51.314161382560115,
            "per_token.latency.mean": 0.019561994145510228,
            "per_token.throughput.value": 51.119532730742336
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "eed9ed679878ada2f6d2eefccdbda368cabc88b1",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.022047264099121094,
            "prefill.throughput.value": 317.4997119156863,
            "decode.latency.mean": 2.582226806640625,
            "decode.throughput.value": 49.182356744728395,
            "per_token.latency.mean": 0.02041716771634671,
            "per_token.throughput.value": 48.97838984784185
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "eed9ed679878ada2f6d2eefccdbda368cabc88b1",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014052671909332275,
            "prefill.throughput.value": 498.1259112262738,
            "decode.latency.mean": 1.5592211303710937,
            "decode.throughput.value": 81.45092285260019,
            "per_token.latency.mean": 0.012330162104881798,
            "per_token.throughput.value": 81.10193454829574
        }
    }
]
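
Each entry above reports mean latency (seconds) and throughput (tokens/second) for the prefill and decode phases of one benchmark run, keyed by its backend config string. A minimal Python sketch for consuming this file, assuming it is saved locally as summaries.json (the variable names here are illustrative, not part of the benchmark tooling); it computes the decode speedup that torch_compile gives over eager mode with the static cache:

    import json

    # Load the list of run summaries from this file.
    with open("summaries.json") as f:
        summaries = json.load(f)

    # Index runs by their config string so variants are easy to compare.
    runs = {entry["config"]: entry["metrics"] for entry in summaries}

    eager = runs["backend.cache_implementation=static,backend.torch_compile=False"]
    compiled = runs["backend.cache_implementation=static,backend.torch_compile=True"]

    # Ratio of mean decode latencies: > 1.0 means compilation helped.
    speedup = eager["decode.latency.mean"] / compiled["decode.latency.mean"]
    print(f"torch.compile decode speedup (static cache): {speedup:.2f}x")

With the numbers above, this prints a speedup of about 1.66x.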