{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.3.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"hub_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.29792,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.2.1",
"optimum_benchmark_commit": null,
"transformers_version": "4.42.0.dev0",
"transformers_commit": "0f67ba1d741d65b07d549daf4ee157609ce4f9c1",
"accelerate_version": "0.32.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.21.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.11.2.dev0",
"peft_commit": null
}
},
"report": {
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1792.958464,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5036.936192
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.043979425430297855,
"mean": 0.021989712715148928,
"stdev": 0.0002128801345825191,
"p50": 0.021989712715148928,
"p90": 0.022160016822814942,
"p95": 0.022181304836273192,
"p99": 0.022198335247039794,
"values": [
0.022202592849731445,
0.021776832580566407
]
},
"throughput": {
"unit": "tokens/s",
"value": 318.3306708312579
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1792.974848,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5036.936704
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.138854248046875,
"mean": 2.5694271240234374,
"stdev": 0.012845825195312521,
"p50": 2.5694271240234374,
"p90": 2.5797037841796873,
"p95": 2.5809883666992186,
"p99": 2.5820160327148436,
"values": [
2.556581298828125,
2.58227294921875
]
},
"throughput": {
"unit": "tokens/s",
"value": 49.42736021293809
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 253,
"total": 5.139182600021365,
"mean": 0.020312974703641747,
"stdev": 0.0014169272697941901,
"p50": 0.02008883285522461,
"p90": 0.020728012847900393,
"p95": 0.020832461166381835,
"p99": 0.02107887596130371,
"values": [
0.020531200408935548,
0.02069811248779297,
0.020724735260009765,
0.020402175903320312,
0.020287488937377928,
0.020280319213867186,
0.02025574493408203,
0.020582399368286132,
0.020626432418823244,
0.020634624481201173,
0.019927040100097656,
0.01991372871398926,
0.020290559768676757,
0.019990528106689453,
0.019929088592529298,
0.0198922233581543,
0.019936256408691407,
0.019962879180908204,
0.019975168228149414,
0.019933183670043944,
0.02024038314819336,
0.02105036735534668,
0.020831232070922853,
0.021109760284423826,
0.021005311965942384,
0.020883455276489257,
0.020083711624145507,
0.019876863479614256,
0.019955711364746095,
0.019949567794799804,
0.019956735610961913,
0.0198922233581543,
0.019923967361450197,
0.01987379264831543,
0.019911680221557617,
0.019922943115234376,
0.019942399978637695,
0.019902463912963866,
0.019933183670043944,
0.019885055541992186,
0.02001817512512207,
0.02045849609375,
0.019952640533447266,
0.019939327239990236,
0.02002943992614746,
0.019948543548583983,
0.01993011283874512,
0.019990528106689453,
0.020246528625488282,
0.020182016372680665,
0.02103910446166992,
0.020785152435302736,
0.020371456146240235,
0.01987583923339844,
0.019944448471069336,
0.01986355209350586,
0.019705856323242187,
0.019920896530151368,
0.019907583236694337,
0.019876863479614256,
0.019904512405395508,
0.019922943115234376,
0.019932159423828123,
0.019948543548583983,
0.02022502326965332,
0.020626432418823244,
0.020323328018188477,
0.019768320083618163,
0.01989836883544922,
0.019817472457885742,
0.02009600067138672,
0.019951616287231445,
0.019974143981933593,
0.019990528106689453,
0.020107263565063475,
0.019970048904418947,
0.019936256408691407,
0.019976192474365235,
0.019987455368041994,
0.01991372871398926,
0.019952640533447266,
0.019766271591186522,
0.019728384017944335,
0.019672063827514647,
0.019663871765136717,
0.0196997127532959,
0.019717119216918946,
0.019681280136108398,
0.019737600326538086,
0.019725311279296876,
0.020099071502685546,
0.019945472717285157,
0.019941375732421874,
0.01992710494995117,
0.01976211166381836,
0.019737600326538086,
0.019784704208374023,
0.019944448471069336,
0.020728832244873048,
0.02065100860595703,
0.02068070411682129,
0.020620288848876952,
0.019966976165771484,
0.019912704467773438,
0.019923967361450197,
0.01989836883544922,
0.019725311279296876,
0.019942399978637695,
0.019762176513671875,
0.02030182456970215,
0.02043903923034668,
0.02067353630065918,
0.02083430480957031,
0.02069606399536133,
0.020009983062744142,
0.019729408264160156,
0.019719167709350584,
0.019887104034423828,
0.01998028755187988,
0.02067353630065918,
0.020998144149780275,
0.019965951919555663,
0.019960832595825196,
0.020031488418579102,
0.021004287719726563,
0.020737024307250978,
0.0419788818359375,
0.020000768661499024,
0.020570112228393556,
0.020718591690063477,
0.02060492706298828,
0.02067251205444336,
0.02071347236633301,
0.020769792556762694,
0.020230144500732423,
0.020574207305908202,
0.020165632247924805,
0.02021785545349121,
0.02083839988708496,
0.020618240356445314,
0.020179967880249023,
0.02051584053039551,
0.020632575988769532,
0.02066329574584961,
0.019745792388916016,
0.019784704208374023,
0.021415935516357423,
0.02083839988708496,
0.020733951568603515,
0.019981311798095702,
0.019959808349609375,
0.02083020782470703,
0.019998720169067383,
0.019966976165771484,
0.020676607131958007,
0.02069196891784668,
0.02068992042541504,
0.02068377685546875,
0.02072985649108887,
0.02039910316467285,
0.019962879180908204,
0.019983360290527344,
0.02070425605773926,
0.019978239059448243,
0.019937280654907227,
0.020773887634277344,
0.019793920516967774,
0.019752960205078125,
0.020789247512817383,
0.02064691162109375,
0.020584447860717774,
0.019908607482910155,
0.01988403129577637,
0.020323328018188477,
0.01999667167663574,
0.019717119216918946,
0.019775487899780272,
0.019726335525512697,
0.019697664260864257,
0.020132863998413086,
0.020328447341918944,
0.02065510368347168,
0.020738048553466795,
0.020723712921142577,
0.02025574493408203,
0.019990528106689453,
0.019959808349609375,
0.020607999801635742,
0.02062848091125488,
0.020669439315795898,
0.020695039749145508,
0.02065920066833496,
0.02068070411682129,
0.02063667106628418,
0.019959808349609375,
0.020246528625488282,
0.01993011283874512,
0.0200130558013916,
0.020574207305908202,
0.020644863128662108,
0.020711423873901368,
0.02066534423828125,
0.020591615676879883,
0.02003558349609375,
0.020620288848876952,
0.020007936477661133,
0.02045747184753418,
0.02033459281921387,
0.020313087463378905,
0.020634624481201173,
0.020015104293823242,
0.02008883285522461,
0.02064384078979492,
0.019993600845336915,
0.020082687377929686,
0.020160512924194338,
0.019737600326538086,
0.020386816024780274,
0.02050048065185547,
0.019727359771728514,
0.020221952438354493,
0.02066022491455078,
0.020758527755737305,
0.020711423873901368,
0.020108287811279296,
0.02006220817565918,
0.02045747184753418,
0.02064076805114746,
0.020333568572998048,
0.019979263305664064,
0.019915775299072267,
0.02066739273071289,
0.019891199111938478,
0.02007756805419922,
0.020068351745605468,
0.01986867141723633,
0.020165632247924805,
0.020182016372680665,
0.02024448013305664,
0.02011955261230469,
0.020452352523803712,
0.02009702491760254,
0.020509695053100584,
0.019785728454589844,
0.019729408264160156,
0.020602880477905275,
0.020718591690063477,
0.02099404716491699,
0.020783103942871094,
0.019959808349609375,
0.020155391693115234,
0.020709375381469726,
0.020280319213867186
]
},
"throughput": {
"unit": "tokens/s",
"value": 49.22961873332704
},
"energy": null,
"efficiency": null
}
}
}