benchmark_results/2024-06-20/summaries.json
[
    {
        "model": "google/gemma-2b",
        "commit": "0ed3ffcb4461a244b87781a24e5ebd0a78f98142",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.01963271999359131,
            "prefill.throughput.value": 356.54764099345397,
            "decode.latency.mean": 2.3445885009765624,
            "decode.throughput.value": 54.16728775522966,
            "per_token.latency.mean": 0.01853426648317118,
            "per_token.throughput.value": 53.954117952711215
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "0ed3ffcb4461a244b87781a24e5ebd0a78f98142",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.021912784576416017,
            "prefill.throughput.value": 319.44821871401325,
            "decode.latency.mean": 2.5411068115234374,
            "decode.throughput.value": 49.97822186146568,
            "per_token.latency.mean": 0.020091584269708324,
            "per_token.throughput.value": 49.77208300630029
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "0ed3ffcb4461a244b87781a24e5ebd0a78f98142",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014057071685791015,
            "prefill.throughput.value": 497.9700008982417,
            "decode.latency.mean": 1.5599285888671874,
            "decode.throughput.value": 81.4139832466477,
            "per_token.latency.mean": 0.012335715033791281,
            "per_token.throughput.value": 81.06542646783713
        }
    }
]
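
Each entry records the google/gemma-2b model, the transformers commit benchmarked, the backend configuration (cache implementation and torch.compile setting), and mean latency/throughput metrics for the prefill and decode phases. A minimal sketch of how such a summaries file could be consumed, assuming it is saved locally as summaries.json (the comparison script below is illustrative and not part of the benchmark tooling):

```python
# Illustrative sketch: load the benchmark summaries and compare decode
# throughput of each configuration against the null-cache, no-compile baseline.
import json

with open("summaries.json") as f:
    summaries = json.load(f)

# Baseline: dynamic (null) cache, torch.compile disabled.
baseline = next(
    s for s in summaries
    if s["config"] == "backend.cache_implementation=null,backend.torch_compile=False"
)
baseline_tps = baseline["metrics"]["decode.throughput.value"]

for s in summaries:
    tps = s["metrics"]["decode.throughput.value"]
    print(f'{s["config"]}: {tps:.1f} tok/s ({tps / baseline_tps:.2f}x vs. baseline)')
```

On these numbers, the static cache alone is slightly slower than the baseline (~50.0 vs. ~54.2 tok/s decode throughput), while static cache combined with torch.compile reaches ~81.4 tok/s, roughly a 1.5x decode speedup over the baseline.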