{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.4.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model_type": "gemma",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.261056,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.4.0",
"optimum_benchmark_commit": null,
"transformers_version": "4.45.0.dev0",
"transformers_commit": "21fac7abba2a37fae86106f87fcf9974fd1e3830",
"accelerate_version": "0.35.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.22.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.12.1.dev0",
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 1320.558592,
"max_global_vram": 6768.033792,
"max_process_vram": 0.0,
"max_reserved": 6138.363904,
"max_allocated": 6060.931072
},
"latency": {
"unit": "s",
"count": 1,
"total": 11.6163466796875,
"mean": 11.6163466796875,
"stdev": 0.0,
"p50": 11.6163466796875,
"p90": 11.6163466796875,
"p95": 11.6163466796875,
"p99": 11.6163466796875,
"values": [
11.6163466796875
]
},
"throughput": null,
"energy": null,
"efficiency": null
},
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1771.589632,
"max_global_vram": 6789.005312,
"max_process_vram": 0.0,
"max_reserved": 6142.558208,
"max_allocated": 5028.450816
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.04353465461730957,
"mean": 0.021767327308654785,
"stdev": 2.950382232666006e-05,
"p50": 0.021767327308654785,
"p90": 0.021790930366516113,
"p95": 0.021793880748748776,
"p99": 0.02179624105453491,
"values": [
0.021737823486328123,
0.021796831130981444
]
},
"throughput": {
"unit": "tokens/s",
"value": 321.582888920716
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1796.149248,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5031.820288
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.2549873046875,
"mean": 2.62749365234375,
"stdev": 0.007569091796875149,
"p50": 2.62749365234375,
"p90": 2.63354892578125,
"p95": 2.6343058349609376,
"p99": 2.634911362304688,
"values": [
2.635062744140625,
2.619924560546875
]
},
"throughput": {
"unit": "tokens/s",
"value": 48.33503589503052
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 254,
"total": 5.254720504760744,
"mean": 0.02068787600299505,
"stdev": 0.0005377315890624552,
"p50": 0.020782591819763184,
"p90": 0.021212877082824707,
"p95": 0.021294458389282227,
"p99": 0.021822361125946047,
"values": [
0.021082111358642578,
0.021223424911499023,
0.021294111251831054,
0.021081056594848633,
0.02128895950317383,
0.02127359962463379,
0.02022502326965332,
0.020192256927490236,
0.02068992042541504,
0.02127052879333496,
0.021301248550415038,
0.020213760375976563,
0.021161983489990235,
0.020229120254516602,
0.020523008346557618,
0.021145599365234375,
0.02106572723388672,
0.020142080307006836,
0.02011238479614258,
0.020130815505981444,
0.02052403259277344,
0.02107596778869629,
0.021128192901611328,
0.021195775985717775,
0.021142528533935546,
0.02106777572631836,
0.021151744842529296,
0.020143104553222657,
0.021142528533935546,
0.02105036735534668,
0.02107904052734375,
0.02109337615966797,
0.020188159942626953,
0.021160959243774414,
0.021158912658691405,
0.021121023178100586,
0.021248031616210937,
0.02111280059814453,
0.021114879608154297,
0.02091110420227051,
0.020133888244628906,
0.020184064865112306,
0.02085171127319336,
0.021149696350097655,
0.021188608169555666,
0.02110054397583008,
0.020494335174560546,
0.021073919296264648,
0.021121023178100586,
0.021081087112426757,
0.02104934310913086,
0.0210882568359375,
0.02107494354248047,
0.02089574432373047,
0.021110784530639647,
0.0210831356048584,
0.02107699203491211,
0.020160512924194338,
0.021158912658691405,
0.0218603515625,
0.021295103073120117,
0.020752384185791017,
0.02110054397583008,
0.021073919296264648,
0.021046335220336915,
0.02014201545715332,
0.020121599197387697,
0.020113407135009767,
0.020537343978881836,
0.02111692810058594,
0.021046272277832033,
0.020847616195678712,
0.020133888244628906,
0.02012876892089844,
0.020593664169311524,
0.021081087112426757,
0.021158912658691405,
0.02010316848754883,
0.020134912490844727,
0.020147199630737304,
0.02007961654663086,
0.02010419273376465,
0.020590591430664062,
0.021788671493530275,
0.021339136123657225,
0.02025881576538086,
0.020563968658447264,
0.021327871322631836,
0.021329919815063478,
0.021163007736206055,
0.020199424743652345,
0.020172800064086914,
0.020171775817871093,
0.020105215072631837,
0.02110054397583008,
0.02107084846496582,
0.0211015682220459,
0.02088140869140625,
0.02126643180847168,
0.021086208343505858,
0.021157888412475585,
0.020180992126464844,
0.020915199279785156,
0.021153791427612305,
0.021104639053344726,
0.020165632247924805,
0.02020147132873535,
0.02021683120727539,
0.020756479263305663,
0.021215232849121093,
0.02109337615966797,
0.02020147132873535,
0.020156415939331054,
0.020152320861816408,
0.020113407135009767,
0.020178943634033202,
0.02018611145019531,
0.020083711624145507,
0.020170751571655272,
0.02016972732543945,
0.020162559509277343,
0.02007756805419922,
0.020082687377929686,
0.02112512016296387,
0.020989952087402345,
0.020068351745605468,
0.020076543807983398,
0.020035648345947267,
0.020082624435424804,
0.020099071502685546,
0.020813823699951172,
0.021120000839233398,
0.021212160110473634,
0.020105215072631837,
0.020122623443603514,
0.020099071502685546,
0.02008678436279297,
0.02004172706604004,
0.02004684829711914,
0.0200581111907959,
0.020115455627441405,
0.020024320602416993,
0.02003660774230957,
0.020122623443603514,
0.020143104553222657,
0.020132863998413086,
0.02042163276672363,
0.02106879997253418,
0.020117504119873047,
0.02007040023803711,
0.02090291213989258,
0.020178943634033202,
0.020102144241333008,
0.0200898551940918,
0.020082687377929686,
0.020105215072631837,
0.020311040878295897,
0.021150720596313476,
0.021105663299560547,
0.020108287811279296,
0.020150272369384766,
0.020320255279541014,
0.020200447082519533,
0.020113407135009767,
0.02008166313171387,
0.020884479522705078,
0.021167104721069335,
0.02107904052734375,
0.020967424392700194,
0.02108518409729004,
0.020144128799438478,
0.021213184356689452,
0.021122047424316406,
0.021204992294311522,
0.021138431549072266,
0.021090303421020508,
0.021646335601806642,
0.021090368270874023,
0.021075904846191405,
0.021123071670532227,
0.020757503509521484,
0.021136383056640624,
0.021104639053344726,
0.021128192901611328,
0.021194751739501954,
0.02109337615966797,
0.021173248291015623,
0.020984832763671874,
0.021141504287719725,
0.021147647857666017,
0.021113855361938477,
0.020136959075927736,
0.02027622413635254,
0.020149248123168945,
0.020237312316894532,
0.021173248291015623,
0.021279743194580078,
0.02171392059326172,
0.021529600143432616,
0.021142528533935546,
0.020144128799438478,
0.020124671936035156,
0.020197376251220703,
0.020174848556518556,
0.02019327926635742,
0.0204769287109375,
0.021118976593017577,
0.021113855361938477,
0.021178367614746094,
0.021161983489990235,
0.021142528533935546,
0.020125696182250977,
0.02009600067138672,
0.020130815505981444,
0.02006937599182129,
0.02011238479614258,
0.020230144500732423,
0.020807680130004884,
0.021145599365234375,
0.02125209617614746,
0.020737024307250978,
0.020156415939331054,
0.02038374328613281,
0.020180992126464844,
0.020185087203979494,
0.020141056060791016,
0.020182016372680665,
0.02064588737487793,
0.02110361671447754,
0.021213184356689452,
0.020144128799438478,
0.020238336563110353,
0.020180992126464844,
0.02017791938781738,
0.020163583755493163,
0.020158464431762696,
0.020130815505981444,
0.02091929626464844,
0.021139455795288087,
0.020164608001708984,
0.02011136054992676,
0.020204544067382812,
0.020566015243530272,
0.021160959243774414,
0.021195775985717775,
0.02349875259399414,
0.022443008422851563,
0.02149171257019043,
0.021213184356689452,
0.02011238479614258,
0.020142080307006836,
0.020164608001708984,
0.020107263565063475,
0.021209087371826172
]
},
"throughput": {
"unit": "tokens/s",
"value": 48.33749002822846
},
"energy": null,
"efficiency": null
}
}
}