[
{
"model": "google/gemma-2b",
"commit": "12b1620e615592fbf099d4ec44af7b9f2d1b48aa",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.023324336051940918,
"prefill.throughput.value": 300.1157239550877,
"decode.latency.mean": 2.48988330078125,
"decode.throughput.value": 51.006406589478004,
"per_token.latency.mean": 0.01968374486779978,
"per_token.throughput.value": 50.80334086405879
}
},
{
"model": "google/gemma-2b",
"commit": "12b1620e615592fbf099d4ec44af7b9f2d1b48aa",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.022455888748168944,
"prefill.throughput.value": 311.7222425930829,
"decode.latency.mean": 2.5843046875,
"decode.throughput.value": 49.14281222886959,
"per_token.latency.mean": 0.020432989150639107,
"per_token.throughput.value": 48.9404654711874
}
},
{
"model": "google/gemma-2b",
"commit": "12b1620e615592fbf099d4ec44af7b9f2d1b48aa",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014073632240295411,
"prefill.throughput.value": 497.3840356548259,
"decode.latency.mean": 1.5779271240234376,
"decode.throughput.value": 80.48533932047017,
"per_token.latency.mean": 0.012477601906998826,
"per_token.throughput.value": 80.14360511366282
}
}
]