{
    "google/gemma-2b": {
        "backend.cache_implementation=null,backend.torch_compile=False": {
            "2e3f8f74747deeeead6cf1f0c12cf01bd7169b82": {
                "metrics": {
                    "prefill.latency.mean": 0.020818703651428225,
                    "prefill.throughput.value": 336.2361133143744,
                    "decode.latency.mean": 2.5248167724609374,
                    "decode.throughput.value": 50.30067979000836,
                    "per_token.latency.mean": 0.019879149895014724,
                    "per_token.throughput.value": 50.303961954166816
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=False": {
            "2e3f8f74747deeeead6cf1f0c12cf01bd7169b82": {
                "metrics": {
                    "prefill.latency.mean": 0.02216444778442383,
                    "prefill.throughput.value": 315.8210873595183,
                    "decode.latency.mean": 2.5914827880859375,
                    "decode.throughput.value": 49.00669245571254,
                    "per_token.latency.mean": 0.020404336876756562,
                    "per_token.throughput.value": 49.00918888175886
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=True": {
            "2e3f8f74747deeeead6cf1f0c12cf01bd7169b82": {
                "metrics": {
                    "prefill.latency.mean": 0.014170464038848876,
                    "prefill.throughput.value": 493.98523441499367,
                    "decode.latency.mean": 1.567123046875,
                    "decode.throughput.value": 81.04022224244018,
                    "per_token.latency.mean": 0.012338381572032537,
                    "per_token.throughput.value": 81.04790682326639
                }
            }
        }
    }
}
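
The file is nested as model -> benchmark config -> commit hash -> metrics. Below is a minimal Python sketch of how such a file could be read and summarized per configuration; the filename "benchmark_results.json" is an assumption for illustration, not part of the data above.

import json

# Hypothetical local copy of the JSON shown above.
with open("benchmark_results.json") as f:
    results = json.load(f)

for model, configs in results.items():
    print(model)
    for config, commits in configs.items():
        for commit, payload in commits.items():
            metrics = payload["metrics"]
            # Report the prefill and decode throughput values as stored in the file.
            print(
                f"  {config} @ {commit[:8]}: "
                f"prefill.throughput={metrics['prefill.throughput.value']:.2f}, "
                f"decode.throughput={metrics['decode.throughput.value']:.2f}"
            )

For the measurements above, this would show that the static-cache run with torch_compile=True reports the highest decode throughput of the three configurations.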