{
    "google/gemma-2b": {
        "backend.cache_implementation=null,backend.torch_compile=False": {
            "e65502951593a76844e872fee9c56b805598538a": {
                "metrics": {
                    "prefill.latency.mean": 0.019957200050354004,
                    "prefill.throughput.value": 350.75060541249786,
                    "decode.latency.mean": 2.3955504150390627,
                    "decode.throughput.value": 53.014956063001115,
                    "per_token.latency.mean": 0.018935415365950392,
                    "per_token.throughput.value": 52.811093956681674
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=False": {
            "e65502951593a76844e872fee9c56b805598538a": {
                "metrics": {
                    "prefill.latency.mean": 0.025638224601745606,
                    "prefill.throughput.value": 273.02982592341425,
                    "decode.latency.mean": 2.6069212646484377,
                    "decode.throughput.value": 48.71646939330439,
                    "per_token.latency.mean": 0.020607854285259023,
                    "per_token.throughput.value": 48.525187831675844
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=True": {
            "e65502951593a76844e872fee9c56b805598538a": {
                "metrics": {
                    "prefill.latency.mean": 0.014111231803894043,
                    "prefill.throughput.value": 496.05874931969623,
                    "decode.latency.mean": 1.574399963378906,
                    "decode.throughput.value": 80.66565228281532,
                    "per_token.latency.mean": 0.012450034842660775,
                    "per_token.throughput.value": 80.32106035345711
                }
            }
        }
    }
}