benchmark_results/2024-08-25/summaries.json
[
  {
    "model": "google/gemma-2b",
    "commit": "0a7af19f4dc868bafc82f35eb7e8d13bac87a594",
    "config": "backend.cache_implementation=null,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.020805952072143554,
      "prefill.throughput.value": 336.4421861459579,
      "decode.latency.mean": 2.4746821289062497,
      "decode.throughput.value": 51.319722446991996,
      "per_token.latency.mean": 0.01948444219273845,
      "per_token.throughput.value": 51.32299863183584
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "0a7af19f4dc868bafc82f35eb7e8d13bac87a594",
    "config": "backend.cache_implementation=static,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.021987600326538084,
      "prefill.throughput.value": 318.36125343570586,
      "decode.latency.mean": 2.6315032958984377,
      "decode.throughput.value": 48.26138739706201,
      "per_token.latency.mean": 0.020719458812803734,
      "per_token.throughput.value": 48.26380886850399
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "0a7af19f4dc868bafc82f35eb7e8d13bac87a594",
    "config": "backend.cache_implementation=static,backend.torch_compile=True",
    "metrics": {
      "prefill.latency.mean": 0.014124320030212401,
      "prefill.throughput.value": 495.59907910800393,
      "decode.latency.mean": 1.5633096313476562,
      "decode.throughput.value": 81.23790543688983,
      "per_token.latency.mean": 0.01230841146303913,
      "per_token.throughput.value": 81.24525272842034
    }
  }
]
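The snippet below is a minimal sketch, not part of the benchmark tooling, of how these summaries might be consumed: it loads the file (assumed to be saved locally as summaries.json), indexes the runs by their config string, computes the decode-latency speedup the torch_compile run shows over the eager static-cache run, and sanity-checks that per-token throughput is roughly the inverse of per-token latency. Only the field names and config strings come from the JSON above; the file location and the comparison logic are assumptions.

import json

# Assumption: the summaries above are saved locally as "summaries.json".
with open("summaries.json") as f:
    summaries = json.load(f)

# Index each run by its config string for easy lookup.
runs = {entry["config"]: entry["metrics"] for entry in summaries}

eager = runs["backend.cache_implementation=static,backend.torch_compile=False"]
compiled = runs["backend.cache_implementation=static,backend.torch_compile=True"]

# Decode-latency speedup of the compiled static-cache run over the eager one.
speedup = eager["decode.latency.mean"] / compiled["decode.latency.mean"]
print(f"decode speedup from torch_compile: {speedup:.2f}x")

# Sanity check: per-token throughput should be roughly 1 / per-token latency.
for config, metrics in runs.items():
    reported = metrics["per_token.throughput.value"]
    implied = 1.0 / metrics["per_token.latency.mean"]
    print(f"{config}: reported {reported:.2f} tok/s, implied {implied:.2f} tok/s")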