{
"google/gemma-2b": {
"backend.cache_implementation=null,backend.torch_compile=False": {
"35a6d9d6483d4d4d7cd817ed4ecfd5f86e1f9a23": {
"metrics": {
"prefill.latency.mean": 0.020440783500671387,
"prefill.throughput.value": 342.45262662119,
"decode.latency.mean": 2.45060498046875,
"decode.throughput.value": 51.82393776728044,
"per_token.latency.mean": 0.019376820123242766,
"per_token.throughput.value": 51.608055069907266
}
}
},
"backend.cache_implementation=static,backend.torch_compile=False": {
"35a6d9d6483d4d4d7cd817ed4ecfd5f86e1f9a23": {
"metrics": {
"prefill.latency.mean": 0.021873536109924315,
"prefill.throughput.value": 320.0214160537128,
"decode.latency.mean": 2.4960036621093753,
"decode.throughput.value": 50.88133560375956,
"per_token.latency.mean": 0.019735539817056166,
"per_token.throughput.value": 50.67001000579492
}
}
},
"backend.cache_implementation=static,backend.torch_compile=True": {
"35a6d9d6483d4d4d7cd817ed4ecfd5f86e1f9a23": {
"metrics": {
"prefill.latency.mean": 0.014046736240386963,
"prefill.throughput.value": 498.33640215110654,
"decode.latency.mean": 1.5604882202148436,
"decode.throughput.value": 81.38478609118562,
"per_token.latency.mean": 0.012340118773841105,
"per_token.throughput.value": 81.03649716239565
}
}
}
}
}