benchmark_results/2024-06-26/summaries.json
hf-transformers-bot's picture
Upload folder using huggingface_hub
0f84566 verified
raw
history blame
1.73 kB
[
{
"model": "google/gemma-2b",
"commit": "0f67ba1d741d65b07d549daf4ee157609ce4f9c1",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.01959430408477783,
"prefill.throughput.value": 357.2466758560754,
"decode.latency.mean": 2.4298411865234373,
"decode.throughput.value": 52.266790399461776,
"per_token.latency.mean": 0.01920895289243917,
"per_token.throughput.value": 52.05905837759693
}
},
{
"model": "google/gemma-2b",
"commit": "0f67ba1d741d65b07d549daf4ee157609ce4f9c1",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.021989712715148928,
"prefill.throughput.value": 318.3306708312579,
"decode.latency.mean": 2.5694271240234374,
"decode.throughput.value": 49.42736021293809,
"per_token.latency.mean": 0.020312974703641747,
"per_token.throughput.value": 49.22961873332704
}
},
{
"model": "google/gemma-2b",
"commit": "0f67ba1d741d65b07d549daf4ee157609ce4f9c1",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014056335926055909,
"prefill.throughput.value": 497.9960664588458,
"decode.latency.mean": 1.5611361694335937,
"decode.throughput.value": 81.35100735387978,
"per_token.latency.mean": 0.012345267096055825,
"per_token.throughput.value": 81.002702672953
}
}
]