benchmark_results/2024-09-04/summaries.json
[
    {
        "model": "google/gemma-2b",
        "commit": "ecd61c62862f925a18b4f063dc17fcaf01826e25",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02066104030609131,
            "prefill.throughput.value": 338.80191395475146,
            "decode.latency.mean": 2.506322265625,
            "decode.throughput.value": 50.67185562760425,
            "per_token.latency.mean": 0.01973356442939578,
            "per_token.throughput.value": 50.6750822223666
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "ecd61c62862f925a18b4f063dc17fcaf01826e25",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.022147024154663086,
            "prefill.throughput.value": 316.0695518781985,
            "decode.latency.mean": 2.674888427734375,
            "decode.throughput.value": 47.4786158118635,
            "per_token.latency.mean": 0.021061095770888442,
            "per_token.throughput.value": 47.48091034191314
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "ecd61c62862f925a18b4f063dc17fcaf01826e25",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.01422326374053955,
            "prefill.throughput.value": 492.1514588841098,
            "decode.latency.mean": 1.5655523681640626,
            "decode.throughput.value": 81.12152782786438,
            "per_token.latency.mean": 0.012326053311505656,
            "per_token.throughput.value": 81.12896924326604
        }
    }
]
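
For reference, a minimal sketch of how these summaries could be consumed, assuming the file is available locally at the path above; the config strings and metric keys are taken from the JSON, but the speedup comparison itself is illustrative and not part of any benchmark pipeline.

import json

# Load the per-config benchmark summaries (path assumed; adjust as needed).
with open("benchmark_results/2024-09-04/summaries.json") as f:
    runs = json.load(f)

# Index decode throughput (tokens/s) by the config string of each run.
decode_tps = {run["config"]: run["metrics"]["decode.throughput.value"] for run in runs}

baseline = decode_tps["backend.cache_implementation=null,backend.torch_compile=False"]
compiled = decode_tps["backend.cache_implementation=static,backend.torch_compile=True"]

# With the numbers above this prints roughly 1.60x.
print(f"decode speedup from static cache + torch.compile: {compiled / baseline:.2f}x")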