benchmark_results/2024-08-29/summaries.json
[
    {
        "model": "google/gemma-2b",
        "commit": "5c1027bf09717f664b579e01cbb8ec3ef5aeb140",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02060132789611816,
            "prefill.throughput.value": 339.78392243924173,
            "decode.latency.mean": 2.452270751953125,
            "decode.throughput.value": 51.78873495059635,
            "per_token.latency.mean": 0.019307854607349306,
            "per_token.throughput.value": 51.79239332055887
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "5c1027bf09717f664b579e01cbb8ec3ef5aeb140",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.022531567573547365,
            "prefill.throughput.value": 310.67523274404476,
            "decode.latency.mean": 2.600800537109375,
            "decode.throughput.value": 48.83111879896504,
            "per_token.latency.mean": 0.0204777060080701,
            "per_token.throughput.value": 48.83359491565648
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "5c1027bf09717f664b579e01cbb8ec3ef5aeb140",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014148736000061035,
            "prefill.throughput.value": 494.7438414265277,
            "decode.latency.mean": 1.5659439086914064,
            "decode.throughput.value": 81.10124462001232,
            "per_token.latency.mean": 0.01232914539397232,
            "per_token.throughput.value": 81.10862253996103
        }
    }
]
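
A minimal sketch of how a summary file like the one above could be consumed; the key names come from the file itself, while the script, the file path, and the output format are illustrative assumptions, not part of the benchmark tooling.

# sketch.py -- load the summaries and compare decode throughput per config
import json

# Assumes the JSON above is saved locally as "summaries.json".
with open("summaries.json") as f:
    runs = json.load(f)

# Treat the run with no cache implementation and no torch.compile as baseline.
baseline = next(
    r for r in runs
    if "cache_implementation=null" in r["config"]
    and "torch_compile=False" in r["config"]
)
baseline_tps = baseline["metrics"]["decode.throughput.value"]

for run in runs:
    tps = run["metrics"]["decode.throughput.value"]
    print(f'{run["config"]}: {tps:.2f} tok/s ({tps / baseline_tps:.2f}x vs. baseline)')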