benchmark_results/2024-07-02/summaries.json
[
    {
        "model": "google/gemma-2b",
        "commit": "3345ae733b6f4aeb7204a0f3e646a3cdbaad0023",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02310329532623291,
            "prefill.throughput.value": 302.9870804643079,
            "decode.latency.mean": 2.4337664794921876,
            "decode.throughput.value": 52.18249206328905,
            "per_token.latency.mean": 0.019237726136158578,
            "per_token.throughput.value": 51.98119533058712
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "3345ae733b6f4aeb7204a0f3e646a3cdbaad0023",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.022356799125671387,
            "prefill.throughput.value": 313.10385537087865,
            "decode.latency.mean": 2.5555784912109374,
            "decode.throughput.value": 49.69520616829977,
            "per_token.latency.mean": 0.020204191924083844,
            "per_token.throughput.value": 49.494679309989024
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "3345ae733b6f4aeb7204a0f3e646a3cdbaad0023",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014050096035003661,
            "prefill.throughput.value": 498.21723513921705,
            "decode.latency.mean": 1.5572908325195312,
            "decode.throughput.value": 81.55188314730364,
            "per_token.latency.mean": 0.012314810176140707,
            "per_token.throughput.value": 81.2030381058936
        }
    }
]
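
A minimal sketch of how these per-config summaries could be loaded and compared, assuming the JSON above is saved locally as `summaries.json` (the file path, the choice of baseline config, and the speedup calculation are illustrative, not part of the original data):

```python
import json

# Load the benchmark summaries (assumed local path; adjust as needed).
with open("summaries.json") as f:
    summaries = json.load(f)

# Treat the non-cached, non-compiled run as the baseline for comparison.
baseline = next(
    s for s in summaries
    if s["config"] == "backend.cache_implementation=null,backend.torch_compile=False"
)
baseline_tps = baseline["metrics"]["decode.throughput.value"]

# Print decode throughput per config and the relative speedup vs. the baseline.
for s in summaries:
    tps = s["metrics"]["decode.throughput.value"]
    print(
        f'{s["config"]}: '
        f'decode {tps:.2f} tok/s '
        f'({tps / baseline_tps:.2f}x vs. baseline)'
    )
```

With the numbers above, this would show the static-cache + `torch_compile=True` run decoding roughly 1.56x faster than the baseline configuration.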